Dec 03 19:29:42 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 03 19:29:42 crc restorecon[4675]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 19:29:42 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 19:29:43 crc restorecon[4675]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 19:29:43 crc 
restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 03 19:29:43 crc 
restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc 
restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc 
restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 19:29:43 
crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 
19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 19:29:43 crc restorecon[4675]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 19:29:43 crc restorecon[4675]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 03 19:29:44 crc kubenswrapper[4916]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 19:29:44 crc kubenswrapper[4916]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 03 19:29:44 crc kubenswrapper[4916]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 19:29:44 crc kubenswrapper[4916]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 03 19:29:44 crc kubenswrapper[4916]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 03 19:29:44 crc kubenswrapper[4916]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.192035 4916 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197516 4916 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197544 4916 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197550 4916 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197556 4916 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197588 4916 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197595 4916 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197601 4916 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197607 4916 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197613 4916 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197619 4916 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197625 4916 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197630 4916 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197642 4916 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197647 4916 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197653 4916 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197658 4916 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197663 4916 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197668 4916 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197674 4916 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197679 4916 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197684 4916 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197689 4916 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197695 4916 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197700 4916 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197705 4916 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197710 4916 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197715 4916 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197721 4916 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197726 4916 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197731 4916 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197736 4916 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197742 4916 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197748 4916 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197753 4916 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197758 4916 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197764 4916 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197771 4916 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197778 4916 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197790 4916 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197798 4916 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
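[Editor's note] The feature_gate.go:330 warnings above are the embedded Kubernetes flag machinery rejecting OpenShift-level gate names it does not register, while feature_gate.go:351/353 note that explicitly setting deprecated or GA gates still works but is slated for removal. A minimal Go sketch of that pattern (illustrative only, not the component-base implementation; the gate table is invented for the example):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

type stage int

const (
	alpha stage = iota
	ga
	deprecated
)

// known stands in for the component's registered gates (illustrative only).
var known = map[string]stage{
	"KMSv1":                                  deprecated,
	"DisableKubeletCloudCredentialProviders": ga,
	"CloudDualStackNodeIPs":                  ga,
	"DynamicResourceAllocation":              alpha,
}

// set mirrors the behavior seen in the log: unknown names are warned about
// and skipped; deprecated/GA gates are applied with a removal warning.
func set(spec string, enabled map[string]bool) {
	for _, kv := range strings.Split(spec, ",") {
		name, val, _ := strings.Cut(kv, "=")
		on, err := strconv.ParseBool(val)
		if err != nil {
			fmt.Printf("invalid value for %s: %q\n", name, val)
			continue
		}
		st, ok := known[name]
		if !ok {
			fmt.Printf("unrecognized feature gate: %s\n", name)
			continue
		}
		switch st {
		case ga:
			fmt.Printf("Setting GA feature gate %s=%v. It will be removed in a future release.\n", name, on)
		case deprecated:
			fmt.Printf("Setting deprecated feature gate %s=%v. It will be removed in a future release.\n", name, on)
		}
		enabled[name] = on
	}
}

func main() {
	enabled := map[string]bool{}
	set("KMSv1=true,GatewayAPI=true,CloudDualStackNodeIPs=true", enabled)
	fmt.Printf("feature gates: %v\n", enabled)
}

Running this prints one "unrecognized feature gate: GatewayAPI" warning and applies the other two, which is exactly the shape of the block above.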
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197805 4916 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197811 4916 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197816 4916 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197821 4916 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197826 4916 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197832 4916 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197837 4916 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197842 4916 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197848 4916 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197856 4916 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197862 4916 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197869 4916 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197875 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197881 4916 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197888 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197895 4916 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197902 4916 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197908 4916 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197914 4916 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197920 4916 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197926 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197933 4916 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197942 4916 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197949 4916 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197955 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197962 4916 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197970 4916 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197976 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197983 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197989 4916 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.197994 4916 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198294 4916 flags.go:64] FLAG: --address="0.0.0.0"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198309 4916 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198318 4916 flags.go:64] FLAG: --anonymous-auth="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198328 4916 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198337 4916 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198344 4916 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198353 4916 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198363 4916 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198370 4916 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198378 4916 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198386 4916 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198394 4916 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198401 4916 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198408 4916 flags.go:64] FLAG: --cgroup-root=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198416 4916 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198423 4916 flags.go:64] FLAG: --client-ca-file=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198431 4916 flags.go:64] FLAG: --cloud-config=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198438 4916 flags.go:64] FLAG: --cloud-provider=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198444 4916 flags.go:64] FLAG: --cluster-dns="[]"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198452 4916 flags.go:64] FLAG: --cluster-domain=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198458 4916 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198467 4916 flags.go:64] FLAG: --config-dir=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198475 4916 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198483 4916 flags.go:64] FLAG: --container-log-max-files="5"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198493 4916 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198500 4916 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198508 4916 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198515 4916 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198523 4916 flags.go:64] FLAG: --contention-profiling="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198530 4916 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198539 4916 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198547 4916 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198555 4916 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198610 4916 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198619 4916 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198627 4916 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198634 4916 flags.go:64] FLAG: --enable-load-reader="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198642 4916 flags.go:64] FLAG: --enable-server="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198649 4916 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198661 4916 flags.go:64] FLAG: --event-burst="100"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198669 4916 flags.go:64] FLAG: --event-qps="50"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198677 4916 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198685 4916 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198692 4916 flags.go:64] FLAG: --eviction-hard=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198701 4916 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198708 4916 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198715 4916 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198723 4916 flags.go:64] FLAG: --eviction-soft=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198730 4916 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198737 4916 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198744 4916 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198750 4916 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198757 4916 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198764 4916 flags.go:64] FLAG: --fail-swap-on="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198771 4916 flags.go:64] FLAG: --feature-gates=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198780 4916 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198787 4916 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198795 4916 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198803 4916 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198811 4916 flags.go:64] FLAG: --healthz-port="10248"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198818 4916 flags.go:64] FLAG: --help="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198825 4916 flags.go:64] FLAG: --hostname-override=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198832 4916 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198839 4916 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198847 4916 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198854 4916 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198861 4916 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198868 4916 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198875 4916 flags.go:64] FLAG: --image-service-endpoint=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198882 4916 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198889 4916 flags.go:64] FLAG: --kube-api-burst="100"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198896 4916 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198904 4916 flags.go:64] FLAG: --kube-api-qps="50"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198911 4916 flags.go:64] FLAG: --kube-reserved=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198918 4916 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198927 4916 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198935 4916 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198942 4916 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198949 4916 flags.go:64] FLAG: --lock-file=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198957 4916 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198965 4916 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198972 4916 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198983 4916 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198990 4916 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.198997 4916 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199004 4916 flags.go:64] FLAG: --logging-format="text"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199011 4916 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199018 4916 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199025 4916 flags.go:64] FLAG: --manifest-url=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199032 4916 flags.go:64] FLAG: --manifest-url-header=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199042 4916 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199051 4916 flags.go:64] FLAG: --max-open-files="1000000"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199060 4916 flags.go:64] FLAG: --max-pods="110"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199100 4916 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199108 4916 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199115 4916 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199122 4916 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199130 4916 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199137 4916 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199144 4916 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199162 4916 flags.go:64] FLAG: --node-status-max-images="50"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199168 4916 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199175 4916 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199182 4916 flags.go:64] FLAG: --pod-cidr=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199189 4916 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199202 4916 flags.go:64] FLAG: --pod-manifest-path=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199209 4916 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199216 4916 flags.go:64] FLAG: --pods-per-core="0"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199224 4916 flags.go:64] FLAG: --port="10250"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199231 4916 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199238 4916 flags.go:64] FLAG: --provider-id=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199247 4916 flags.go:64] FLAG: --qos-reserved=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199255 4916 flags.go:64] FLAG: --read-only-port="10255"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199263 4916 flags.go:64] FLAG: --register-node="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199271 4916 flags.go:64] FLAG: --register-schedulable="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199279 4916 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199293 4916 flags.go:64] FLAG: --registry-burst="10"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199301 4916 flags.go:64] FLAG: --registry-qps="5"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199308 4916 flags.go:64] FLAG: --reserved-cpus=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199315 4916 flags.go:64] FLAG: --reserved-memory=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199332 4916 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199340 4916 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199348 4916 flags.go:64] FLAG: --rotate-certificates="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199357 4916 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199365 4916 flags.go:64] FLAG: --runonce="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199373 4916 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199381 4916 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199389 4916 flags.go:64] FLAG: --seccomp-default="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199396 4916 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199404 4916 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199412 4916 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199420 4916 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199427 4916 flags.go:64] FLAG: --storage-driver-password="root"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199434 4916 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199442 4916 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199449 4916 flags.go:64] FLAG: --storage-driver-user="root"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199456 4916 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199464 4916 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199472 4916 flags.go:64] FLAG: --system-cgroups=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199479 4916 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199491 4916 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199499 4916 flags.go:64] FLAG: --tls-cert-file=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199506 4916 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199517 4916 flags.go:64] FLAG: --tls-min-version=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199525 4916 flags.go:64] FLAG: --tls-private-key-file=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199532 4916 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199538 4916 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199545 4916 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199551 4916 flags.go:64] FLAG: --v="2"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199559 4916 flags.go:64] FLAG: --version="false"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199588 4916 flags.go:64] FLAG: --vmodule=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199596 4916 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.199602 4916 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199768 4916 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199776 4916 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199784 4916 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199790 4916 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199795 4916 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199801 4916 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199806 4916 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199811 4916 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199819 4916 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
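[Editor's note] The flags.go:64 lines above dump every kubelet flag with its effective value, which makes the startup log a reliable place to recover the node's actual configuration. A small scraper sketch, assuming one log entry per line and a hypothetical kubelet.log path (the marker string is just the substring visible in these lines, not a stable kubelet interface):

package main

import (
	"bufio"
	"fmt"
	"os"
	"strings"
)

// marker matches the flags.go:64 lines above; treat it as an assumption
// about this particular log format.
const marker = "flags.go:64] FLAG: "

func main() {
	f, err := os.Open("kubelet.log") // hypothetical path
	if err != nil {
		panic(err)
	}
	defer f.Close()

	flags := map[string]string{}
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // some log lines are very long
	for sc.Scan() {
		line := sc.Text()
		i := strings.Index(line, marker)
		if i < 0 {
			continue
		}
		// Each entry looks like: FLAG: --name="value"
		name, val, ok := strings.Cut(line[i+len(marker):], "=")
		if !ok {
			continue
		}
		flags[name] = strings.Trim(val, `"`)
	}
	fmt.Println("node-ip:", flags["--node-ip"])
	fmt.Println("system-reserved:", flags["--system-reserved"])
}

On this log it would report node-ip 192.168.126.11 and the cpu/ephemeral-storage/memory reservations shown above.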
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199826 4916 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199832 4916 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199838 4916 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199844 4916 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199849 4916 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199855 4916 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199860 4916 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199866 4916 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199871 4916 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199877 4916 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199883 4916 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199888 4916 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199895 4916 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199900 4916 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199906 4916 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199911 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199916 4916 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199921 4916 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199926 4916 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199933 4916 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199940 4916 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199948 4916 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199954 4916 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199961 4916 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199968 4916 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199977 4916 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199986 4916 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.199994 4916 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200001 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200007 4916 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200013 4916 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200021 4916 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200026 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200032 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200038 4916 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200044 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200049 4916 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200054 4916 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200059 4916 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200065 4916 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200070 4916 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200075 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200080 4916 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200085 4916 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200091 4916 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200097 4916 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200102 4916 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200107 4916 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200112 4916 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200117 4916 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200123 4916 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200128 4916 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200133 4916 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200139 4916 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200144 4916 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200149 4916 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200154 4916 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200160 4916 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200166 4916 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200171 4916 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200176 4916 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.200181 4916 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.200191 4916 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.211836 4916 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.211936 4916 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212136 4916 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212188 4916 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212199 4916 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212209 4916 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212217 4916 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212225 4916 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212234 4916 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212242 4916 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212281 4916 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212289 4916 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212297 4916 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212305 4916 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212313 4916 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212321 4916 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212329 4916 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212370 4916 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212378 4916 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212386 4916 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212394 4916 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212402 4916 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212414 4916 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212424 4916 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212465 4916 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212478 4916 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212488 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212498 4916 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
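[Editor's note] The feature_gate.go:386 line above prints the resolved gate map, and server.go:493 reports the Go runtime knobs; the empty GOGC/GOMAXPROCS/GOTRACEBACK values mean those environment variables were unset, so runtime defaults apply. A tiny sketch of reporting the same settings from any Go process (illustrative, not the kubelet's code):

package main

import (
	"fmt"
	"os"
	"runtime"
)

func main() {
	// Empty values mean "not set in the environment", as in the log line;
	// the effective GOMAXPROCS can still be read from the runtime itself.
	fmt.Printf("GOGC=%q GOMAXPROCS=%q GOTRACEBACK=%q\n",
		os.Getenv("GOGC"), os.Getenv("GOMAXPROCS"), os.Getenv("GOTRACEBACK"))
	fmt.Println("effective GOMAXPROCS:", runtime.GOMAXPROCS(0)) // query without changing it
}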
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212509 4916 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212545 4916 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212554 4916 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212600 4916 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212609 4916 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212620 4916 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212629 4916 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212639 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212647 4916 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212655 4916 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212693 4916 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212701 4916 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212709 4916 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212717 4916 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212725 4916 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212733 4916 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212740 4916 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212779 4916 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212787 4916 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212794 4916 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212802 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212810 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212820 4916 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212828 4916 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212836 4916 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212875 4916 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212883 4916 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212892 4916 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212900 4916 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212908 4916 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212916 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212924 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212961 4916 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212970 4916 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212977 4916 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212985 4916 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.212993 4916 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213001 4916 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213009 4916 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213047 4916 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213055 4916 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213062 4916 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213070 4916 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213078 4916 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213086 4916 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.213100 4916 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213439 4916 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213454 4916 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213494 4916 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213503 4916 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213512 4916 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213520 4916 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213528 4916 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213536 4916 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213545 4916 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213609 4916 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213617 4916 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213626 4916 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213635 4916 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213643 4916 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213651 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213660 4916 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213698 4916 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213706 4916 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213714 4916 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213722 4916 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213733 4916 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213744 4916 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213752 4916 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213793 4916 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213804 4916 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213813 4916 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213821 4916 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213831 4916 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213839 4916 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213847 4916 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213885 4916 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213893 4916 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213901 4916 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213910 4916 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213919 4916 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213927 4916 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213966 4916 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213981 4916 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.213992 4916 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214001 4916 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214009 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214017 4916 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214025 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214063 4916 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214071 4916 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214079 4916 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214087 4916 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214095 4916 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214102 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214110 4916 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214147 4916 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214155 4916 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214163 4916 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214171 4916 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214179 4916 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214187 4916 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214195 4916 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214204 4916 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214244 4916 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214253 4916 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214261 4916 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214268 4916 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214276 4916 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214284 4916 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214291 4916 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214330 4916 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214340 4916 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214348 4916 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214356 4916 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214364 4916 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.214372 4916 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.214383 4916 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.214783 4916 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.219254 4916 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.219389 4916 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
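[Editor's note] server.go:940 and bootstrap.go:85 above show the client-certificate bootstrap path: the existing kubelet-client-current.pem is still valid, so the kubelet keeps using it and rotates in the background. Checking that validity by hand reduces to parsing the PEM and comparing NotAfter against the clock; a sketch assuming the same combined cert/key file seen in the log:

package main

import (
	"crypto/tls"
	"crypto/x509"
	"fmt"
	"time"
)

func main() {
	// The combined cert/key PEM path from the log; adjust for other nodes.
	path := "/var/lib/kubelet/pki/kubelet-client-current.pem"
	pair, err := tls.LoadX509KeyPair(path, path) // cert and key live in one file
	if err != nil {
		panic(err)
	}
	leaf, err := x509.ParseCertificate(pair.Certificate[0])
	if err != nil {
		panic(err)
	}
	fmt.Println("expires:", leaf.NotAfter)
	fmt.Println("still valid:", time.Now().Before(leaf.NotAfter))
}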
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.220187 4916 server.go:997] "Starting client certificate rotation"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.220232 4916 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.220709 4916 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-25 23:08:19.831526825 +0000 UTC
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.220832 4916 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.271805 4916 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.273813 4916 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.323881 4916 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.333464 4916 log.go:25] "Validated CRI v1 runtime API"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.367468 4916 log.go:25] "Validated CRI v1 image API"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.369391 4916 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.371481 4916 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-03-19-25-03-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.371535 4916 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}]
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.400345 4916 manager.go:217] Machine: {Timestamp:2025-12-03 19:29:44.398983208 +0000 UTC m=+0.361793484 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f BootID:41f46b78-490e-42ba-85e2-5e59d1446fea Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:2b:df:1a Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:2b:df:1a Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:93:1e:eb Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:a4:45:c6 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:6f:16:a1 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:ee:57:b8 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:5a:7d:2a:79:b1:14 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:0a:f1:44:23:02:4d Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.400706 4916 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.400895 4916 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.401986 4916 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.402442 4916 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.402523 4916 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.403065 4916 topology_manager.go:138] "Creating topology manager with none policy"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.403092 4916 container_manager_linux.go:303] "Creating device plugin manager"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.403523 4916 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.403651 4916 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.404098 4916 state_mem.go:36] "Initialized new in-memory state store"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.404300 4916 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.405460 4916 kubelet.go:418] "Attempting to sync node with API server"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.405506 4916 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.405540 4916 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.405604 4916 kubelet.go:324] "Adding apiserver pod source"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.405632 4916 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.407445 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.407492 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused
Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.407609 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError"
Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.407536 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.408320 4916 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.408758 4916 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
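The two "Validated CRI v1 ... API" lines and the cgroupDriver="systemd" line record the kubelet probing CRI-O over gRPC before building the container manager. A sketch of the same Version round-trip, assuming CRI-O's conventional socket path (not taken from this log) and the published k8s.io/cri-api bindings:

package main

import (
	"context"
	"fmt"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

func main() {
	// Dial the runtime socket; /var/run/crio/crio.sock is an assumed default.
	conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()

	// Version is the call behind "Validated CRI v1 runtime API".
	v, err := runtimeapi.NewRuntimeServiceClient(conn).Version(ctx, &runtimeapi.VersionRequest{})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s %s (CRI %s)\n", v.RuntimeName, v.RuntimeVersion, v.RuntimeApiVersion)
}

The reply carries the same fields logged by kuberuntime_manager.go:262 above (containerRuntime="cri-o", version, apiVersion="v1").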
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.409617 4916 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410170 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410190 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410197 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410203 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410214 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410221 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410232 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410244 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410252 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410258 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410269 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410276 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410299 4916 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410740 4916 server.go:1280] "Started kubelet"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410869 4916 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410987 4916 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.410941 4916 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 03 19:29:44 crc systemd[1]: Started Kubernetes Kubelet.
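The csi_plugin.go:884 line above is the kubelet polling for its own CSINode object and failing while api-int.crc.testing:6443 refuses connections. The equivalent read with client-go, assuming an illustrative kubeconfig path rather than the kubelet's internal client wiring:

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Build a client from a kubeconfig; the path here is assumed.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// The same read the kubelet retries for node "crc"; while the API server
	// is down it fails with "connection refused", exactly as in the log above.
	n, err := cs.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		fmt.Println("CSINode not available yet:", err)
		return
	}
	fmt.Println("CSINode drivers published:", len(n.Spec.Drivers))
}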
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.413316 4916 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.414589 4916 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.175:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187dcb528ce58a62 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 19:29:44.410712674 +0000 UTC m=+0.373522940,LastTimestamp:2025-12-03 19:29:44.410712674 +0000 UTC m=+0.373522940,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.415710 4916 server.go:460] "Adding debug handlers to kubelet server"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.417063 4916 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.417226 4916 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 16:04:45.875881677 +0000 UTC
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.417297 4916 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 188h35m1.458588158s for next certificate rotation
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.417330 4916 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.417434 4916 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.417449 4916 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.417506 4916 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.417596 4916 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.420017 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="200ms"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.420949 4916 factory.go:55] Registering systemd factory
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.421010 4916 factory.go:221] Registration of the systemd container factory successfully
Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.423887 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.423957 4916 factory.go:153] Registering CRI-O factory
Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.423974 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.423990 4916 factory.go:221] Registration of the crio container factory successfully
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.424080 4916 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.424130 4916 factory.go:103] Registering Raw factory
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.424148 4916 manager.go:1196] Started watching for new ooms in manager
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.424920 4916 manager.go:319] Starting recovery of all containers
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.429811 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.429883 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.429897 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.429909 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.429920 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.429932 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.429944 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.429956 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.429971 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.429983 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.429994 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430007 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430020 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430035 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430046 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430059 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430071 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430083 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430095 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430106 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430141 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430152 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430163 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430197 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430210 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430223 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430237 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430249 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430261 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430273 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430283 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430294 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430304 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430315 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430325 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430335 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430345 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430355 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430366 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430376 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430387 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430398 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430407 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430433 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430446 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430456 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430466 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430477 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430487 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430497 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430507 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430539 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430553 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430578 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430591 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430603 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430614 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430626 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430638 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430655 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430668 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430679 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430691 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430701 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430715 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430726 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430738 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430749 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430761 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430771 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430783 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430796 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430807 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430818 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430829 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430840 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430869 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430880 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430890 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430901 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430914 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430925 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430936 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430947 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430958 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430970 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430981 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.430993 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431004 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431015 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431025 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431035 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431047 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431058 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431069 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431079 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431092 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431105 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431117 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431127 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431139 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431151 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431164 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.431177 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434703 4916 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434742 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434762 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434777 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434797 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434810 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434823 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434836 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434850 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434865 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434878 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434891 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434906 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434918 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434930 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434942 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434955 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.434994 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435008 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435022 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435034 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435051 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435062 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435074 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435086 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state"
pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435098 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435109 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435120 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435131 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435143 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435155 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435167 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435180 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435192 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435204 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435217 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435230 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435242 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435254 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435265 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435277 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435288 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435301 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435312 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435353 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435366 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435378 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435390 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435402 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435413 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435427 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435438 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435450 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435461 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435473 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435485 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435520 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435532 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435544 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435556 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435589 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435603 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435616 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435628 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435639 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435651 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435663 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435675 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435694 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435707 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435721 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435732 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435744 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435754 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435766 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435777 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435788 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435798 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435832 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435843 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435854 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435867 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435878 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435891 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435904 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435917 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435928 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435940 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435954 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435967 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435978 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.435990 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436002 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436013 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436028 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436040 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436052 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436064 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436076 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436086 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436099 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436111 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436130 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436140 4916 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436152 4916 reconstruct.go:97] "Volume reconstruction finished" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.436162 4916 reconciler.go:26] "Reconciler: start to sync state" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.462337 4916 manager.go:324] Recovery completed Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.475130 4916 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.476600 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.476616 4916 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.476777 4916 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.476810 4916 kubelet.go:2335] "Starting kubelet main sync loop" Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.476963 4916 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.478197 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.478303 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.478508 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.478600 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.478625 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.481202 4916 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.481237 4916 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 03 19:29:44 crc 
kubenswrapper[4916]: I1203 19:29:44.481261 4916 state_mem.go:36] "Initialized new in-memory state store" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.490282 4916 policy_none.go:49] "None policy: Start" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.491221 4916 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.491266 4916 state_mem.go:35] "Initializing new in-memory state store" Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.519887 4916 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.553253 4916 manager.go:334] "Starting Device Plugin manager" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.553329 4916 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.553347 4916 server.go:79] "Starting device plugin registration server" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.554304 4916 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.554331 4916 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.554511 4916 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.554721 4916 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.554739 4916 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.564683 4916 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.577905 4916 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.578006 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.580122 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.580151 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.580165 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.580273 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.580975 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.581006 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.581070 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.581114 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.581125 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.581302 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.582050 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.582117 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.584373 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.584431 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.584444 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.584763 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.584824 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.584837 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.584778 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.584941 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.584986 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.585939 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.586099 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.586182 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.587224 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.587273 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.587292 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.587472 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.587679 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.587748 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.588862 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.588907 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.588926 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.590179 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.590252 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.590269 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.590665 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.592014 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.593063 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.593157 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.593199 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.594499 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.594544 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.594590 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.621434 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="400ms" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639656 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639720 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639739 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639756 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639774 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639800 4916 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639818 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639834 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639852 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639869 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639883 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.639951 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.640038 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.640093 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.640120 4916 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.654790 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.655992 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.656047 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.656064 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.656102 4916 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.656657 4916 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.175:6443: connect: connection refused" node="crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.741839 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.741904 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.741922 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.741941 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.741984 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742001 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742015 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742031 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742046 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742061 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742076 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742095 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742080 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742155 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742156 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742103 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742198 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742250 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742112 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742271 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742261 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742327 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742389 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742318 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742248 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742372 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742297 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742513 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742632 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.742510 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.856953 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.858828 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.858853 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.858861 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.858882 4916 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 19:29:44 crc kubenswrapper[4916]: E1203 19:29:44.859211 4916 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.175:6443: connect: connection refused" node="crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.922729 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.947802 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.950481 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-2ee6f80105484d3c409fbafebb9d91b60c3b7e4c7a5d08d5fb11ecd6633c76f2 WatchSource:0}: Error finding container 2ee6f80105484d3c409fbafebb9d91b60c3b7e4c7a5d08d5fb11ecd6633c76f2: Status 404 returned error can't find the container with id 2ee6f80105484d3c409fbafebb9d91b60c3b7e4c7a5d08d5fb11ecd6633c76f2 Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.961860 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.977700 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: I1203 19:29:44.985977 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 19:29:44 crc kubenswrapper[4916]: W1203 19:29:44.987432 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-b9ad1dc1ccdd5bd0d8687389a4e1aced549752ef3d5dae709f0fcd4450d0278f WatchSource:0}: Error finding container b9ad1dc1ccdd5bd0d8687389a4e1aced549752ef3d5dae709f0fcd4450d0278f: Status 404 returned error can't find the container with id b9ad1dc1ccdd5bd0d8687389a4e1aced549752ef3d5dae709f0fcd4450d0278f Dec 03 19:29:45 crc kubenswrapper[4916]: W1203 19:29:45.008897 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-c1c5cc90a013ce7d48a5ecb97f0d0bca89e824d1da632003831545598e2b20e6 WatchSource:0}: Error finding container c1c5cc90a013ce7d48a5ecb97f0d0bca89e824d1da632003831545598e2b20e6: Status 404 returned error can't find the container with id c1c5cc90a013ce7d48a5ecb97f0d0bca89e824d1da632003831545598e2b20e6 Dec 03 19:29:45 crc kubenswrapper[4916]: W1203 19:29:45.013549 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-e70250706fba4bfcb7972b179bb7f4ec3a2ce7a27ac1da975e8d819fb471e822 WatchSource:0}: Error finding container e70250706fba4bfcb7972b179bb7f4ec3a2ce7a27ac1da975e8d819fb471e822: Status 404 returned error can't find the container with id e70250706fba4bfcb7972b179bb7f4ec3a2ce7a27ac1da975e8d819fb471e822 Dec 03 19:29:45 crc kubenswrapper[4916]: E1203 19:29:45.023278 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="800ms" Dec 03 19:29:45 crc kubenswrapper[4916]: I1203 19:29:45.260230 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:45 crc kubenswrapper[4916]: I1203 19:29:45.261987 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:45 crc kubenswrapper[4916]: I1203 19:29:45.262031 4916 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:45 crc kubenswrapper[4916]: I1203 19:29:45.262042 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:45 crc kubenswrapper[4916]: I1203 19:29:45.262069 4916 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 19:29:45 crc kubenswrapper[4916]: E1203 19:29:45.262448 4916 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.175:6443: connect: connection refused" node="crc" Dec 03 19:29:45 crc kubenswrapper[4916]: W1203 19:29:45.289811 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused Dec 03 19:29:45 crc kubenswrapper[4916]: E1203 19:29:45.289902 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError" Dec 03 19:29:45 crc kubenswrapper[4916]: I1203 19:29:45.412439 4916 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused Dec 03 19:29:45 crc kubenswrapper[4916]: W1203 19:29:45.455447 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused Dec 03 19:29:45 crc kubenswrapper[4916]: E1203 19:29:45.455524 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError" Dec 03 19:29:45 crc kubenswrapper[4916]: W1203 19:29:45.471343 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused Dec 03 19:29:45 crc kubenswrapper[4916]: E1203 19:29:45.471418 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError" Dec 03 19:29:45 crc kubenswrapper[4916]: I1203 19:29:45.480891 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e70250706fba4bfcb7972b179bb7f4ec3a2ce7a27ac1da975e8d819fb471e822"} Dec 03 19:29:45 crc kubenswrapper[4916]: I1203 19:29:45.481798 4916 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7a6cda22b62a6ceb853715640fb632c9aa2cfe53e0496586d5b9f31e45ddab8a"} Dec 03 19:29:45 crc kubenswrapper[4916]: I1203 19:29:45.482646 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c1c5cc90a013ce7d48a5ecb97f0d0bca89e824d1da632003831545598e2b20e6"} Dec 03 19:29:45 crc kubenswrapper[4916]: I1203 19:29:45.483340 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b9ad1dc1ccdd5bd0d8687389a4e1aced549752ef3d5dae709f0fcd4450d0278f"} Dec 03 19:29:45 crc kubenswrapper[4916]: I1203 19:29:45.484159 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"2ee6f80105484d3c409fbafebb9d91b60c3b7e4c7a5d08d5fb11ecd6633c76f2"} Dec 03 19:29:45 crc kubenswrapper[4916]: E1203 19:29:45.824911 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="1.6s" Dec 03 19:29:45 crc kubenswrapper[4916]: W1203 19:29:45.953197 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused Dec 03 19:29:45 crc kubenswrapper[4916]: E1203 19:29:45.953287 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.063349 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.065258 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.065297 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.065305 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.065328 4916 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 19:29:46 crc kubenswrapper[4916]: E1203 19:29:46.065808 4916 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.175:6443: connect: connection refused" node="crc" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.400159 4916 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 03 19:29:46 crc kubenswrapper[4916]: 
E1203 19:29:46.402210 4916 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.412723 4916 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.490863 4916 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb" exitCode=0 Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.490988 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb"} Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.491191 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.492465 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.492528 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.492684 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.495349 4916 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61" exitCode=0 Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.495462 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61"} Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.495665 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.496512 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.497656 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.497711 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.497734 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.497990 4916 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" 
containerID="9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d" exitCode=0 Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.498062 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d"} Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.498152 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.499914 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.499930 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.499958 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.500028 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.500103 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.500140 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.502327 4916 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604" exitCode=0 Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.502427 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604"} Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.502442 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.503457 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.503505 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.503526 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.510705 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995"} Dec 03 19:29:46 crc kubenswrapper[4916]: I1203 19:29:46.510974 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074"} Dec 03 
19:29:47 crc kubenswrapper[4916]: W1203 19:29:47.394661 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused Dec 03 19:29:47 crc kubenswrapper[4916]: E1203 19:29:47.394789 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.413070 4916 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused Dec 03 19:29:47 crc kubenswrapper[4916]: E1203 19:29:47.426635 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="3.2s" Dec 03 19:29:47 crc kubenswrapper[4916]: W1203 19:29:47.446234 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused Dec 03 19:29:47 crc kubenswrapper[4916]: E1203 19:29:47.446338 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.516597 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0"} Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.516646 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd"} Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.516664 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a"} Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.520329 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf"} Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.520373 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
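Note the lease controller's retry interval doubling across these "Failed to ensure lease exists" entries: 800ms, then 1.6s, then 3.2s here (and 6.4s further down). A sketch of that doubling cadence, not the kubelet's actual nodelease controller:

	// backoff_sketch.go: mirror the observed retry sequence.
	package main

	import (
		"fmt"
		"time"
	)

	func main() {
		interval := 800 * time.Millisecond
		for attempt := 1; attempt <= 4; attempt++ {
			fmt.Printf("ensure-lease attempt %d failed; will retry in %v\n", attempt, interval)
			interval *= 2 // each failure doubles the reported retry interval
		}
	}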
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d"} Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.520507 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.522305 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.522369 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.522388 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.527895 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741"} Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.528163 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd"} Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.530377 4916 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd" exitCode=0 Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.530425 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd"} Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.531912 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.533161 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.533206 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.533221 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.535910 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"89e751bd44282d50876324e02cf4f3eea3dbc28a7c3ba136fe3259fae0ce0098"} Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.536096 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.537532 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.537651 4916 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.537766 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.666681 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.667791 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.667848 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.667861 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:47 crc kubenswrapper[4916]: I1203 19:29:47.667883 4916 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 19:29:47 crc kubenswrapper[4916]: E1203 19:29:47.668405 4916 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.175:6443: connect: connection refused" node="crc" Dec 03 19:29:47 crc kubenswrapper[4916]: W1203 19:29:47.719179 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.175:6443: connect: connection refused Dec 03 19:29:47 crc kubenswrapper[4916]: E1203 19:29:47.719291 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.175:6443: connect: connection refused" logger="UnhandledError" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.542918 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694"} Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.543308 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73"} Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.545306 4916 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1" exitCode=0 Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.545406 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.545458 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.545524 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.545451 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1"} Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.545607 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547103 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547130 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547139 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547417 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547448 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547451 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547469 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547478 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547482 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547589 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547623 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:48 crc kubenswrapper[4916]: I1203 19:29:48.547639 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.133089 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.551468 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5"} Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.551635 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.552488 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.552520 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.552532 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.555005 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.555314 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73"} Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.555348 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560"} Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.555696 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.555717 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.555724 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:49 crc kubenswrapper[4916]: I1203 19:29:49.906499 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.563610 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087"} Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.563677 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080"} Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.563697 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c"} Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.563742 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.563765 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.563786 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.563833 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.565820 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.565857 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.565903 4916 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.565970 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.566008 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.566028 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.565830 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.566116 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.566132 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.612658 4916 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.869179 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.871072 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.871125 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.871138 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:50 crc kubenswrapper[4916]: I1203 19:29:50.871171 4916 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 19:29:51 crc kubenswrapper[4916]: I1203 19:29:51.566004 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:51 crc kubenswrapper[4916]: I1203 19:29:51.566120 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:51 crc kubenswrapper[4916]: I1203 19:29:51.567000 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:51 crc kubenswrapper[4916]: I1203 19:29:51.567032 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:51 crc kubenswrapper[4916]: I1203 19:29:51.567044 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:51 crc kubenswrapper[4916]: I1203 19:29:51.568009 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:51 crc kubenswrapper[4916]: I1203 19:29:51.568061 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:51 crc kubenswrapper[4916]: I1203 19:29:51.568078 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:51 crc kubenswrapper[4916]: I1203 19:29:51.775851 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:52 crc kubenswrapper[4916]: I1203 19:29:52.229017 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:29:52 crc kubenswrapper[4916]: I1203 19:29:52.568856 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:52 crc kubenswrapper[4916]: I1203 19:29:52.570039 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:52 crc kubenswrapper[4916]: I1203 19:29:52.570098 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:52 crc kubenswrapper[4916]: I1203 19:29:52.570115 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:52 crc kubenswrapper[4916]: I1203 19:29:52.907285 4916 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 19:29:52 crc kubenswrapper[4916]: I1203 19:29:52.907397 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 19:29:53 crc kubenswrapper[4916]: I1203 19:29:53.571510 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:53 crc kubenswrapper[4916]: I1203 19:29:53.573069 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:53 crc kubenswrapper[4916]: I1203 19:29:53.573138 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:53 crc kubenswrapper[4916]: I1203 19:29:53.573164 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.004331 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.004597 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.005868 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.005921 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.005938 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:54 crc kubenswrapper[4916]: E1203 19:29:54.564806 4916 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.692402 4916 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.692659 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.695835 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.695881 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.695898 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.698102 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.840630 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.840901 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.842762 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.842809 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.842829 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.978391 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.978688 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.980197 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.980274 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:54 crc kubenswrapper[4916]: I1203 19:29:54.980295 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:55 crc kubenswrapper[4916]: I1203 19:29:55.575523 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:55 crc kubenswrapper[4916]: I1203 19:29:55.576332 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:55 crc kubenswrapper[4916]: I1203 19:29:55.576368 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:55 crc kubenswrapper[4916]: I1203 19:29:55.576381 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:55 crc kubenswrapper[4916]: I1203 19:29:55.582319 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:29:56 crc kubenswrapper[4916]: I1203 19:29:56.577813 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:56 crc kubenswrapper[4916]: I1203 19:29:56.579004 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:56 crc kubenswrapper[4916]: I1203 19:29:56.579048 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:56 crc kubenswrapper[4916]: I1203 19:29:56.579059 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:57 crc kubenswrapper[4916]: I1203 19:29:57.506686 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 03 19:29:57 crc kubenswrapper[4916]: I1203 19:29:57.506923 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:29:57 crc kubenswrapper[4916]: I1203 19:29:57.508649 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:29:57 crc kubenswrapper[4916]: I1203 19:29:57.508700 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:29:57 crc kubenswrapper[4916]: I1203 19:29:57.508723 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:29:58 crc kubenswrapper[4916]: W1203 19:29:58.238730 4916 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 03 19:29:58 crc kubenswrapper[4916]: I1203 19:29:58.238809 4916 trace.go:236] Trace[642434213]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 19:29:48.237) (total time: 10001ms): Dec 03 19:29:58 crc kubenswrapper[4916]: Trace[642434213]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:29:58.238) Dec 03 19:29:58 crc kubenswrapper[4916]: Trace[642434213]: [10.001131654s] [10.001131654s] END Dec 03 19:29:58 crc kubenswrapper[4916]: E1203 19:29:58.238829 4916 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 03 19:29:58 crc kubenswrapper[4916]: I1203 19:29:58.413531 4916 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 03 19:29:58 crc kubenswrapper[4916]: I1203 19:29:58.734669 4916 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path 
\"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 19:29:58 crc kubenswrapper[4916]: I1203 19:29:58.734732 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 19:29:58 crc kubenswrapper[4916]: I1203 19:29:58.742219 4916 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 03 19:29:58 crc kubenswrapper[4916]: I1203 19:29:58.742299 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.234250 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.234449 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.235498 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.235533 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.235542 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.235581 4916 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.235626 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.238624 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.594747 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.595361 4916 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.595593 4916 
prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.596106 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.596315 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.596470 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.908470 4916 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 19:30:02 crc kubenswrapper[4916]: I1203 19:30:02.908559 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 19:30:03 crc kubenswrapper[4916]: E1203 19:30:03.736921 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Dec 03 19:30:03 crc kubenswrapper[4916]: I1203 19:30:03.739049 4916 trace.go:236] Trace[1757588893]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 19:29:51.165) (total time: 12573ms): Dec 03 19:30:03 crc kubenswrapper[4916]: Trace[1757588893]: ---"Objects listed" error: 12573ms (19:30:03.738) Dec 03 19:30:03 crc kubenswrapper[4916]: Trace[1757588893]: [12.573250849s] [12.573250849s] END Dec 03 19:30:03 crc kubenswrapper[4916]: I1203 19:30:03.739072 4916 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 19:30:03 crc kubenswrapper[4916]: I1203 19:30:03.739359 4916 trace.go:236] Trace[1688512044]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 19:29:53.065) (total time: 10673ms): Dec 03 19:30:03 crc kubenswrapper[4916]: Trace[1688512044]: ---"Objects listed" error: 10673ms (19:30:03.739) Dec 03 19:30:03 crc kubenswrapper[4916]: Trace[1688512044]: [10.673869749s] [10.673869749s] END Dec 03 19:30:03 crc kubenswrapper[4916]: I1203 19:30:03.739385 4916 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 19:30:03 crc kubenswrapper[4916]: I1203 19:30:03.743384 4916 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 03 19:30:03 crc kubenswrapper[4916]: E1203 19:30:03.743592 4916 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config 
cache not synchronized" node="crc" Dec 03 19:30:03 crc kubenswrapper[4916]: I1203 19:30:03.743817 4916 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 03 19:30:03 crc kubenswrapper[4916]: I1203 19:30:03.744719 4916 trace.go:236] Trace[751280664]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 19:29:52.793) (total time: 10951ms): Dec 03 19:30:03 crc kubenswrapper[4916]: Trace[751280664]: ---"Objects listed" error: 10950ms (19:30:03.744) Dec 03 19:30:03 crc kubenswrapper[4916]: Trace[751280664]: [10.951028759s] [10.951028759s] END Dec 03 19:30:03 crc kubenswrapper[4916]: I1203 19:30:03.744744 4916 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 19:30:03 crc kubenswrapper[4916]: I1203 19:30:03.760134 4916 csr.go:261] certificate signing request csr-ht6jh is approved, waiting to be issued Dec 03 19:30:03 crc kubenswrapper[4916]: I1203 19:30:03.766463 4916 csr.go:257] certificate signing request csr-ht6jh is issued Dec 03 19:30:03 crc kubenswrapper[4916]: I1203 19:30:03.928719 4916 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.221799 4916 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.221984 4916 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": read tcp 38.102.83.175:60796->38.102.83.175:6443: use of closed network connection" event="&Event{ObjectMeta:{kube-apiserver-crc.187dcb533dc84cbd openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-syncer},},Reason:Created,Message:Created container kube-apiserver-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 19:29:47.378363581 +0000 UTC m=+3.341173887,LastTimestamp:2025-12-03 19:29:47.378363581 +0000 UTC m=+3.341173887,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 19:30:04 crc kubenswrapper[4916]: W1203 19:30:04.222081 4916 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Service ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Dec 03 19:30:04 crc kubenswrapper[4916]: W1203 19:30:04.222085 4916 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Node ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Dec 03 19:30:04 crc kubenswrapper[4916]: W1203 19:30:04.222095 4916 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.CSIDriver ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Dec 03 19:30:04 crc kubenswrapper[4916]: W1203 19:30:04.222252 4916 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.RuntimeClass ended with: very 
short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.415316 4916 apiserver.go:52] "Watching apiserver" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.419330 4916 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.419636 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.420004 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.420186 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.420219 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.420296 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.420338 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.420502 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.420623 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.420738 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.420765 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.423242 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.423461 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.423813 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.424122 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.424270 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.424378 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.424432 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.424614 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.424669 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.443469 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.447414 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.447682 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.447706 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.447726 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.447744 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.447766 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 19:30:04 crc 
kubenswrapper[4916]: I1203 19:30:04.447789 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.447807 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.447834 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.449240 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.450153 4916 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.450548 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.457044 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.466399 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.466726 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.466753 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.466766 4916 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.466818 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:04.966802355 +0000 UTC m=+20.929612621 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.470008 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.480168 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.480204 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.480218 4916 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.480274 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:04.980254333 +0000 UTC m=+20.943064599 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.480363 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.496190 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.504323 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.516540 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.520369 4916 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.527114 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.528220 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-fcbx4"] Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.528555 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-fcbx4" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.532850 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.533070 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.533181 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.542888 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548437 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548480 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548500 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548520 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548536 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548551 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548586 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " 
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548606 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548621 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548635 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548651 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548665 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548680 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548695 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548713 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548735 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548778 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548793 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548811 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548854 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548851 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548864 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548870 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548935 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548959 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548976 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548992 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548987 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.548987 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549154 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549173 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549202 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549212 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549231 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549333 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549410 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549452 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549474 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549620 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549645 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549652 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549808 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549900 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549895 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549009 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549954 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.549985 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550009 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550028 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550065 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550075 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550105 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550125 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550140 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550175 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550209 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550245 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550275 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550278 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550301 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550307 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550365 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550367 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550403 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550419 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550435 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550445 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550453 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550499 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550525 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550530 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550551 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550597 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550610 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550622 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550646 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550652 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550670 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550698 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550720 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550784 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550808 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550831 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550849 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550855 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550893 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550919 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550941 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550954 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550962 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.550985 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551007 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551013 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551029 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551054 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551079 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551103 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551125 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551151 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551153 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551175 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551197 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551223 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551246 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551268 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551291 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551318 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551342 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551365 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551389 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551412 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551436 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551458 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551484 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551505 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551525 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551543 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551558 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551591 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551611 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551631 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551653 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551677 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551697 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551731 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551754 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551776 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551798 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551818 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551841 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551864 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551885 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551925 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551943 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551958 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: 
\"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551981 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552000 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552016 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552030 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552044 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552059 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552076 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552093 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552108 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552124 4916 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552139 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552155 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552173 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552190 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552207 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552223 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552237 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552252 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552267 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552283 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552299 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552314 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552330 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552345 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552430 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552455 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552478 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552498 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552523 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552580 4916 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552599 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552617 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552635 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552652 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552667 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552682 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552697 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552711 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552726 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 19:30:04 crc 
kubenswrapper[4916]: I1203 19:30:04.552742 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552764 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552786 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552804 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552841 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552859 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552876 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552890 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552905 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552921 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552935 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552953 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552970 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552986 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553005 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553021 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553037 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553053 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553069 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553085 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553102 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553120 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553135 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553151 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553167 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553184 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553202 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553254 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553271 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553287 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553309 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553324 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553339 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553356 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553373 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553390 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553415 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553436 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553460 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 
19:30:04.553482 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553503 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553519 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553535 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553551 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554114 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554140 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554162 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554234 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554255 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554271 4916 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554288 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554305 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554321 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554602 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554632 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554658 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554813 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554849 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554876 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554910 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554937 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554962 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555128 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555148 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555162 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555177 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555190 4916 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555203 4916 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555216 4916 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555229 4916 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: 
\"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555242 4916 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555256 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555270 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555284 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555298 4916 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555312 4916 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555325 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555337 4916 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555350 4916 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555364 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555377 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555390 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555404 4916 
reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555419 4916 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555432 4916 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555444 4916 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555456 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555467 4916 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555479 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555493 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555505 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555518 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555531 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555545 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555558 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555592 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.555606 4916 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.565661 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.570079 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.570899 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551954 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.551993 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552047 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552161 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
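The status_manager failure above is the kubelet failing to PATCH pod status because the API server's pod.network-node-identity.openshift.io webhook backend on 127.0.0.1:9743 is not accepting connections yet. A minimal Go sketch of the kind of reachability probe that reproduces the "connection refused" symptom from the node; the address and timeout come from the log line, the probe itself is illustrative and not kubelet code:

```go
package main

import (
	"fmt"
	"net"
	"time"
)

// Probe the webhook backend the kubelet could not reach. A "connection
// refused" result matches the status_manager error above: nothing is
// listening on 127.0.0.1:9743 yet, so every pod status patch that must
// pass through the webhook is rejected by the API server.
func main() {
	conn, err := net.DialTimeout("tcp", "127.0.0.1:9743", 2*time.Second)
	if err != nil {
		fmt.Println("webhook endpoint unreachable:", err)
		return
	}
	conn.Close()
	fmt.Println("webhook endpoint is accepting connections")
}
```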
InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552267 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552316 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552496 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552583 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552584 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.552734 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553197 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553337 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553499 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553705 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.553754 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554167 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554186 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.554851 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.556440 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.556460 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.560739 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.560896 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.561137 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.561147 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.561331 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.561504 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.561609 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-03 19:30:05.061588799 +0000 UTC m=+21.024399065 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.579475 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.580297 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.580520 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.580550 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.580903 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.580985 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
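The TearDown failure above is a CSI plugin-registration race after the kubelet restart: the PVC belongs to the kubevirt.io.hostpath-provisioner driver, but that driver has not yet re-registered over the kubelet plugin socket, so the operation is requeued with the 500ms durationBeforeRetry shown. A hedged client-go sketch that lists the drivers currently registered on the node via its CSINode object; the node name "crc" and the kubeconfig path are assumptions for illustration:

```go
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// List the CSI drivers registered on the node. Until
// kubevirt.io.hostpath-provisioner appears in this list, the kubelet
// keeps requeueing the TearDown logged above.
func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	csiNode, err := client.StorageV1().CSINodes().Get(context.Background(), "crc", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	for _, d := range csiNode.Spec.Drivers {
		fmt.Println("registered CSI driver:", d.Name)
	}
}
```

The same information should be visible from the CLI with `oc get csinode crc -o yaml`, which reads the identical object.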
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.562357 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.562488 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.562798 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.563836 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.564066 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.564517 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.564541 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.564666 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.564981 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.565105 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.565322 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.583347 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.565731 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.565905 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.565993 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.566166 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.566605 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.566600 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.566621 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.566935 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.566968 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.567109 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.567419 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.567463 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.567990 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.583535 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.568014 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.568102 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.568444 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.568629 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.569110 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.583593 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.569053 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.569183 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.583619 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.569197 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.569251 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.569299 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.570131 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.570183 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.570403 4916 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.583740 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:05.083721732 +0000 UTC m=+21.046531998 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.570938 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.571009 4916 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:04 crc kubenswrapper[4916]: E1203 19:30:04.583791 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:05.083784114 +0000 UTC m=+21.046594370 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.571437 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.571878 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.572217 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.572091 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.573089 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.573138 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.573173 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.573210 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.573386 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.573404 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.573681 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.573903 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.574146 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.574221 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.574248 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.574411 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.574451 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.574682 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.574926 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.574979 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.574945 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.575005 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.575435 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.583942 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.575484 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.574784 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.575656 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.575842 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.575965 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.576082 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.576201 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.576250 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.576318 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.576542 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.576428 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.576591 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.576904 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.577054 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.577346 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.577635 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.577648 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.577750 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.578104 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.578080 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.578233 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.578599 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.578662 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.578701 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.578748 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.578972 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.579125 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.583986 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.584117 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.583986 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.584035 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.584278 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.584412 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.584451 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.584479 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.584589 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.585068 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.585166 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.585206 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.585214 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.585373 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.585581 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.585858 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.586012 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.587367 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.585633 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.587487 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.588619 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.589030 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.589679 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.590116 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.590122 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.590646 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.590924 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.591114 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.591142 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.591017 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.591391 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.594473 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.597993 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.598619 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.598744 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.602054 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.604118 4916 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5" exitCode=255 Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.604166 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5"} Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.609409 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.613810 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.616070 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.619630 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.633707 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.633904 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.634519 4916 scope.go:117] "RemoveContainer" containerID="3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.639593 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.645172 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656714 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656776 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkkqp\" (UniqueName: \"kubernetes.io/projected/1aa2c320-e4a7-4032-a519-e36ba11108e3-kube-api-access-tkkqp\") pod \"node-resolver-fcbx4\" (UID: \"1aa2c320-e4a7-4032-a519-e36ba11108e3\") " pod="openshift-dns/node-resolver-fcbx4" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656796 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1aa2c320-e4a7-4032-a519-e36ba11108e3-hosts-file\") pod \"node-resolver-fcbx4\" (UID: \"1aa2c320-e4a7-4032-a519-e36ba11108e3\") " pod="openshift-dns/node-resolver-fcbx4" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656789 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656834 4916 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656844 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656854 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656863 4916 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656872 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656880 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656888 4916 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656896 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656904 4916 reconciler_common.go:293] "Volume detached for volume 
\"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656914 4916 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656922 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656931 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656939 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656947 4916 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656955 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656963 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656971 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656972 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.656979 4916 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657004 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657013 4916 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" 
DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657022 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657030 4916 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657039 4916 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657047 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657055 4916 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657064 4916 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657081 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657088 4916 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657096 4916 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657104 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657112 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657120 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657128 4916 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: 
I1203 19:30:04.657136 4916 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657144 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657153 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657161 4916 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657170 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657177 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657185 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657193 4916 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657202 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657223 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657231 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657239 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657248 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 
03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657255 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657264 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657272 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657280 4916 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657289 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657297 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657304 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657312 4916 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657320 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657327 4916 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657335 4916 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657343 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657351 4916 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc 
kubenswrapper[4916]: I1203 19:30:04.657358 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657365 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657373 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657382 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657390 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657398 4916 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657406 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657414 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657421 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657429 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657437 4916 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657444 4916 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657452 4916 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") 
on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657460 4916 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657468 4916 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657475 4916 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657483 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657491 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657499 4916 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657507 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657515 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657524 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657533 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657541 4916 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657549 4916 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657557 4916 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc 
kubenswrapper[4916]: I1203 19:30:04.657592 4916 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657602 4916 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657610 4916 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657617 4916 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657625 4916 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657634 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657642 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657649 4916 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657657 4916 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657664 4916 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657673 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657680 4916 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657689 4916 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657703 4916 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657713 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657723 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657734 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657744 4916 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657755 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657764 4916 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657773 4916 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657780 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657788 4916 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657796 4916 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657803 4916 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657811 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657819 4916 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657827 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657834 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657843 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657852 4916 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657860 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657870 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657881 4916 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657892 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657903 4916 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657913 4916 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657922 4916 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657931 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc 
kubenswrapper[4916]: I1203 19:30:04.657943 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657954 4916 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657965 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657974 4916 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657985 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.657995 4916 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658005 4916 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658015 4916 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658025 4916 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658034 4916 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658043 4916 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658054 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658065 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658076 4916 reconciler_common.go:293] "Volume 
detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658088 4916 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658099 4916 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658110 4916 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658121 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658133 4916 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658144 4916 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658155 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658166 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658175 4916 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658185 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658196 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658206 4916 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658216 4916 reconciler_common.go:293] "Volume 
detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658227 4916 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658238 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658249 4916 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658260 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658271 4916 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658279 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658287 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.658295 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.675702 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03
T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.689502 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.707727 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.719835 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.734136 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.735195 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.744527 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.750144 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.758679 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkkqp\" (UniqueName: \"kubernetes.io/projected/1aa2c320-e4a7-4032-a519-e36ba11108e3-kube-api-access-tkkqp\") pod \"node-resolver-fcbx4\" (UID: \"1aa2c320-e4a7-4032-a519-e36ba11108e3\") " pod="openshift-dns/node-resolver-fcbx4" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.758728 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1aa2c320-e4a7-4032-a519-e36ba11108e3-hosts-file\") pod \"node-resolver-fcbx4\" (UID: \"1aa2c320-e4a7-4032-a519-e36ba11108e3\") " pod="openshift-dns/node-resolver-fcbx4" Dec 03 19:30:04 crc 
kubenswrapper[4916]: I1203 19:30:04.758827 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1aa2c320-e4a7-4032-a519-e36ba11108e3-hosts-file\") pod \"node-resolver-fcbx4\" (UID: \"1aa2c320-e4a7-4032-a519-e36ba11108e3\") " pod="openshift-dns/node-resolver-fcbx4" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.764046 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.766086 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.767863 4916 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-03 19:25:03 +0000 UTC, rotation deadline is 2026-10-08 18:00:54.665427104 +0000 UTC Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.767965 4916 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7414h30m49.897469029s for next certificate rotation Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.775708 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.780650 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkkqp\" (UniqueName: \"kubernetes.io/projected/1aa2c320-e4a7-4032-a519-e36ba11108e3-kube-api-access-tkkqp\") pod \"node-resolver-fcbx4\" (UID: \"1aa2c320-e4a7-4032-a519-e36ba11108e3\") " pod="openshift-dns/node-resolver-fcbx4" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.792769 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.801749 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.809076 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 
03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.836487 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.839903 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-fcbx4" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.849890 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.870414 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.889457 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.904295 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.922654 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-q4hms"] Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.923077 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.924118 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-4vkgz"] Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.924319 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-4vkgz" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.924675 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-kp7gb"] Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.925206 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.925324 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.925581 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.925699 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.925820 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.925986 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.930020 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.930068 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.930149 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.930243 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.930533 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.930838 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.933456 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.943442 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.957240 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.960823 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-rootfs\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.960864 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-mcd-auth-proxy-config\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.960881 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzj2v\" (UniqueName: \"kubernetes.io/projected/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-kube-api-access-pzj2v\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.960918 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-proxy-tls\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.968891 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:04 crc kubenswrapper[4916]: I1203 19:30:04.983793 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.000219 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.014773 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03
T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.027818 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.039512 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.049443 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.060184 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.061453 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.061508 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-cnibin\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.061530 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-os-release\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.061560 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod 
\"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.061612 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-rootfs\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.061638 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-etc-kubernetes\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.061664 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-daemon-config\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.061670 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.061708 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.061720 4916 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.061830 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-rootfs\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.061889 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22dsk\" (UniqueName: \"kubernetes.io/projected/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-kube-api-access-22dsk\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.061962 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:06.061916979 +0000 UTC m=+22.024727245 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.062006 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.062495 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.062533 4916 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.062638 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:06.062610507 +0000 UTC m=+22.025420853 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.062785 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2a1fc2b9-c813-42d4-badd-f1f81e57b667-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.062821 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzj2v\" (UniqueName: \"kubernetes.io/projected/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-kube-api-access-pzj2v\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.062852 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-var-lib-kubelet\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.062868 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-run-multus-certs\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.062884 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-run-netns\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.062898 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-hostroot\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.062916 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwshh\" (UniqueName: \"kubernetes.io/projected/2a1fc2b9-c813-42d4-badd-f1f81e57b667-kube-api-access-cwshh\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.062951 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-cni-binary-copy\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.062968 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-run-k8s-cni-cncf-io\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.062986 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-proxy-tls\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.063005 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-socket-dir-parent\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.063019 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-conf-dir\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.063034 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-cnibin\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.063051 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-system-cni-dir\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.063067 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-system-cni-dir\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.063085 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.063105 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-mcd-auth-proxy-config\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.063120 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-cni-dir\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.063148 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-var-lib-cni-bin\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.063165 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2a1fc2b9-c813-42d4-badd-f1f81e57b667-cni-binary-copy\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.063184 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-var-lib-cni-multus\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 
19:30:05.063199 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-os-release\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.064664 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-mcd-auth-proxy-config\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.068092 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-proxy-tls\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.071539 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.079433 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzj2v\" (UniqueName: \"kubernetes.io/projected/5cc773ef-1b60-461f-a7ac-2b8a23a1d04f-kube-api-access-pzj2v\") pod \"machine-config-daemon-q4hms\" (UID: \"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\") " pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.082706 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.094723 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.105878 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.115459 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.125335 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.136748 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-che
ck-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.145860 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.154084 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.163673 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.163772 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.163800 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-cni-binary-copy\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.163824 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-run-k8s-cni-cncf-io\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.163848 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-socket-dir-parent\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.163902 4916 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-conf-dir\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.163924 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-cnibin\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.163925 4916 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.163948 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-system-cni-dir\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.163958 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-run-k8s-cni-cncf-io\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.163977 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-system-cni-dir\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.164005 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:06.163985453 +0000 UTC m=+22.126795809 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164026 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164048 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-system-cni-dir\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164073 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-conf-dir\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164047 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-cni-dir\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164121 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164139 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-var-lib-cni-bin\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164138 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-system-cni-dir\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164140 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-socket-dir-parent\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164154 4916 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2a1fc2b9-c813-42d4-badd-f1f81e57b667-cni-binary-copy\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164189 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-cni-dir\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164048 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-cnibin\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164221 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-var-lib-cni-multus\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164252 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-var-lib-cni-multus\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164276 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-var-lib-cni-bin\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.164297 4916 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164302 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-os-release\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.164351 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:06.164336292 +0000 UTC m=+22.127146558 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164391 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-cnibin\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164412 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-os-release\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164421 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-tuning-conf-dir\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164439 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-etc-kubernetes\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164450 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2a1fc2b9-c813-42d4-badd-f1f81e57b667-os-release\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164465 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-daemon-config\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164497 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22dsk\" (UniqueName: \"kubernetes.io/projected/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-kube-api-access-22dsk\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164505 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-os-release\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164522 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/2a1fc2b9-c813-42d4-badd-f1f81e57b667-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164547 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-var-lib-kubelet\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164585 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-run-multus-certs\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164593 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-cnibin\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164611 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-etc-kubernetes\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164620 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-run-netns\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164601 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-run-netns\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164641 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-var-lib-kubelet\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164647 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-hostroot\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164665 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-hostroot\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164664 4916 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-host-run-multus-certs\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.164666 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwshh\" (UniqueName: \"kubernetes.io/projected/2a1fc2b9-c813-42d4-badd-f1f81e57b667-kube-api-access-cwshh\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.165133 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 19:30:05 crc kubenswrapper[4916]: E1203 19:30:05.165319 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:30:06.165305927 +0000 UTC m=+22.128116193 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.165472 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2a1fc2b9-c813-42d4-badd-f1f81e57b667-cni-binary-copy\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.165506 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-cni-binary-copy\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.165577 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-multus-daemon-config\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.166374 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2a1fc2b9-c813-42d4-badd-f1f81e57b667-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.185123 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22dsk\" (UniqueName: \"kubernetes.io/projected/d75c407a-2bbd-4cc3-bc0e-b1010aeeab57-kube-api-access-22dsk\") pod \"multus-4vkgz\" (UID: \"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\") " pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.188936 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwshh\" (UniqueName: \"kubernetes.io/projected/2a1fc2b9-c813-42d4-badd-f1f81e57b667-kube-api-access-cwshh\") pod \"multus-additional-cni-plugins-kp7gb\" (UID: \"2a1fc2b9-c813-42d4-badd-f1f81e57b667\") " pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.253216 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:30:05 crc kubenswrapper[4916]: W1203 19:30:05.265858 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cc773ef_1b60_461f_a7ac_2b8a23a1d04f.slice/crio-2c15dee797a8ebf64d5282feac99e2959f576e4a8c35d20d56a1cda6af8d494a WatchSource:0}: Error finding container 2c15dee797a8ebf64d5282feac99e2959f576e4a8c35d20d56a1cda6af8d494a: Status 404 returned error can't find the container with id 2c15dee797a8ebf64d5282feac99e2959f576e4a8c35d20d56a1cda6af8d494a Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.278786 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-4vkgz" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.289546 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.292497 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.297517 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-c9jfr"] Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.298274 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.302641 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.302688 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.302742 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.302791 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.302824 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.304151 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 19:30:05 crc kubenswrapper[4916]: W1203 19:30:05.316528 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a1fc2b9_c813_42d4_badd_f1f81e57b667.slice/crio-43c819e7cc89f8ede72127a446f7239a08aaea81f4691d8e07f985f99ac9be82 WatchSource:0}: Error finding container 43c819e7cc89f8ede72127a446f7239a08aaea81f4691d8e07f985f99ac9be82: Status 404 returned error can't find the container with id 43c819e7cc89f8ede72127a446f7239a08aaea81f4691d8e07f985f99ac9be82 Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.318839 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.352049 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366142 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-ovn-kubernetes\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366187 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-env-overrides\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366211 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-netns\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366232 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-log-socket\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366251 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-netd\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366271 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-etc-openvswitch\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366288 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-slash\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366304 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-ovn\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366328 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-config\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366342 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc7lq\" (UniqueName: \"kubernetes.io/projected/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-kube-api-access-pc7lq\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366357 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-node-log\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366370 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-var-lib-openvswitch\") pod 
\"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366383 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-script-lib\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366398 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-systemd\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366413 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366428 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovn-node-metrics-cert\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366449 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-kubelet\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366463 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-systemd-units\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366484 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-openvswitch\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.366498 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-bin\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.392650 4916 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\
\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.425798 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467250 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467302 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovn-node-metrics-cert\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467325 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-systemd\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467358 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-kubelet\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467378 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-systemd-units\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467401 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-openvswitch\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467419 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-bin\") pod 
\"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467457 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-ovn-kubernetes\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467481 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-env-overrides\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467501 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-netns\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467520 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-log-socket\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467543 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-netd\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467585 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-etc-openvswitch\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467615 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-slash\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467638 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-ovn\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467672 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-config\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467685 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-openvswitch\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467710 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-systemd\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467696 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc7lq\" (UniqueName: \"kubernetes.io/projected/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-kube-api-access-pc7lq\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467778 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467786 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-node-log\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467817 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-script-lib\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467840 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-var-lib-openvswitch\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467862 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-bin\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467906 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-var-lib-openvswitch\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 
19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467911 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-ovn-kubernetes\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467942 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-kubelet\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467929 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.467972 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-systemd-units\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.468000 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-node-log\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.468038 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-etc-openvswitch\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.468075 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-netns\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.468101 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-log-socket\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.468124 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-netd\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.468146 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-ovn\") pod 
\"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.468165 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-slash\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.468604 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-env-overrides\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.468803 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-script-lib\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.468907 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-config\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.471262 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovn-node-metrics-cert\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.504154 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc7lq\" (UniqueName: \"kubernetes.io/projected/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-kube-api-access-pc7lq\") pod \"ovnkube-node-c9jfr\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.523114 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.574853 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.606232 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.607322 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4vkgz" event={"ID":"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57","Type":"ContainerStarted","Data":"18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.607356 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4vkgz" event={"ID":"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57","Type":"ContainerStarted","Data":"31f97cce6163611c2c07ea5dcd4c984bc0fcdadf34bf8352fe7720843711c5a3"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.608707 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-fcbx4" event={"ID":"1aa2c320-e4a7-4032-a519-e36ba11108e3","Type":"ContainerStarted","Data":"1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.608734 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-fcbx4" event={"ID":"1aa2c320-e4a7-4032-a519-e36ba11108e3","Type":"ContainerStarted","Data":"ac158ae6a60dac4317db07769211744476d32acf6d64e9bbe0b06e1d6793a3e3"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.610235 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.610268 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.610281 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b424d5d53236045fca97ee897e47b625d05f06eb97aa9bd58c219375207a8286"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.611708 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.611736 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"681f2682e6198365b3e149d7428eeb2df4c9655b8285834f4bf70849fa946322"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.613004 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.613031 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"2c15dee797a8ebf64d5282feac99e2959f576e4a8c35d20d56a1cda6af8d494a"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.613788 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4ebf605289a1e32739d41a0c3b1d414856191519e398e4df5a219d9bc8a339b3"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.614480 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.614659 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" event={"ID":"2a1fc2b9-c813-42d4-badd-f1f81e57b667","Type":"ContainerStarted","Data":"43c819e7cc89f8ede72127a446f7239a08aaea81f4691d8e07f985f99ac9be82"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.616507 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.618122 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4"} Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.618438 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.662441 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.684400 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.729813 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.764753 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: W1203 19:30:05.790728 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod990ba077_9bb2_4ab0_b098_c4c6fd6f4f18.slice/crio-61a946cddb2bdef40b7add5dca513fde134d581e8ab48797a05a0e85fcf0b49a WatchSource:0}: Error finding container 61a946cddb2bdef40b7add5dca513fde134d581e8ab48797a05a0e85fcf0b49a: Status 404 returned error can't find the container with id 61a946cddb2bdef40b7add5dca513fde134d581e8ab48797a05a0e85fcf0b49a Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.816742 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.862838 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: 
I1203 19:30:05.891754 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.922176 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:05 crc kubenswrapper[4916]: I1203 19:30:05.964104 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:05Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.004931 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.045584 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.075586 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.075629 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " 
pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.075746 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.075763 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.075774 4916 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.075826 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:08.075813312 +0000 UTC m=+24.038623578 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.075859 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.075902 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.075924 4916 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.076009 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:08.075983006 +0000 UTC m=+24.038793332 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.094359 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.126615 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.166405 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.176108 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.176226 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.176276 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.176306 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:30:08.176285864 +0000 UTC m=+24.139096130 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.176387 4916 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.176416 4916 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.176439 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:08.176426228 +0000 UTC m=+24.139236494 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.176454 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:08.176448109 +0000 UTC m=+24.139258375 (durationBeforeRetry 2s). 
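The TearDownAt failure above is a registration race rather than a storage fault: kubelet has just restarted, and the kubevirt.io.hostpath-provisioner node plugin has not yet re-registered over the kubelet plugin socket, so the driver is absent from kubelet's list of registered CSI drivers and the unmount is parked. The "No retries permitted until ... (durationBeforeRetry 2s)" wording shows how retries are paced: each failure defers the next attempt by a growing delay. A minimal Go sketch of that pacing, using illustrative constants rather than kubelet's exact backoff parameters:

package main

import (
	"fmt"
	"time"
)

// Illustrative pacing for a failing volume operation: the first retry is
// deferred 2s (matching durationBeforeRetry in the log) and each further
// failure roughly doubles the delay up to a cap. The constants here are
// assumptions, not kubelet's exact exponential-backoff values.
type backoff struct {
	delay     time.Duration
	max       time.Duration
	notBefore time.Time
}

func (b *backoff) recordFailure(now time.Time) time.Duration {
	if b.delay == 0 {
		b.delay = 2 * time.Second
	} else {
		b.delay *= 2
		if b.delay > b.max {
			b.delay = b.max
		}
	}
	b.notBefore = now.Add(b.delay)
	return b.delay
}

func main() {
	b := &backoff{max: 2 * time.Minute}
	now := time.Now()
	for attempt := 1; attempt <= 4; attempt++ {
		d := b.recordFailure(now)
		fmt.Printf("attempt %d: no retries permitted until %s (durationBeforeRetry %s)\n",
			attempt, b.notBefore.UTC().Format(time.RFC3339), d)
		now = b.notBefore // assume the next attempt runs as soon as permitted
	}
}

Once the driver pod comes back and re-registers, the parked unmount succeeds on its next permitted attempt.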
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.204516 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.255391 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.286342 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.478074 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.478113 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.478113 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.478621 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.478697 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
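The "Error syncing pod, skipping" entries above are kubelet's sandbox gate: while the container runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ contains no CNI configuration yet (the ovnkube-node and multus pods are still initializing), no new pod sandboxes are created. A minimal sketch of that readiness condition, assuming it reduces to "a usable conf file exists in the conf dir":

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// Sketch of the condition behind "no CNI configuration file in
// /etc/kubernetes/cni/net.d/": the network counts as ready only once the
// CNI plugin has written a config file into the conf dir.
func networkReady(confDir string) (bool, error) {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ready, err := networkReady("/etc/kubernetes/cni/net.d")
	fmt.Println("NetworkReady:", ready, err)
}

On this node the gate clears once the OVN-Kubernetes and multus pods finish starting and write their configuration, at which point the skipped pods are synced again.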
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:06 crc kubenswrapper[4916]: E1203 19:30:06.478712 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.481887 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.482641 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.483339 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.484914 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.485724 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.486371 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.488356 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.489061 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.490280 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.490951 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.491961 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.492628 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.493626 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.494254 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.494906 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.495807 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.496388 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.497328 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.497884 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.498953 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.500742 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.501531 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.502079 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.503348 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.504229 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.506364 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" 
path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.507289 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.508474 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.509598 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.511141 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.512091 4916 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.512297 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.515130 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.515888 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.516379 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.518393 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.519966 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.520660 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.522183 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.523192 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.525044 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.525775 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.527270 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.528509 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.529135 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.529803 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.531161 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.533393 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.534020 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.534591 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.536335 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.537190 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.539103 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.539749 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" 
path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.622487 4916 generic.go:334] "Generic (PLEG): container finished" podID="2a1fc2b9-c813-42d4-badd-f1f81e57b667" containerID="220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a" exitCode=0 Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.622576 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" event={"ID":"2a1fc2b9-c813-42d4-badd-f1f81e57b667","Type":"ContainerDied","Data":"220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a"} Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.624721 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6"} Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.625960 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77" exitCode=0 Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.626530 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77"} Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.626580 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"61a946cddb2bdef40b7add5dca513fde134d581e8ab48797a05a0e85fcf0b49a"} Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.644272 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.663066 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.683143 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name
\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.703122 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Ru
nning\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.727298 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07
724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.758930 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.781917 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.790783 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-tpt4n"] Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.791077 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-tpt4n" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.803353 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.803398 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.803440 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.803483 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.807579 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.826766 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc 
kubenswrapper[4916]: I1203 19:30:06.840211 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.856034 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.869607 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.882349 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d06fba7e-f02e-4eee-8907-405a69b5f7d5-serviceca\") pod \"node-ca-tpt4n\" (UID: \"d06fba7e-f02e-4eee-8907-405a69b5f7d5\") " pod="openshift-image-registry/node-ca-tpt4n" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.882385 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5hql\" (UniqueName: \"kubernetes.io/projected/d06fba7e-f02e-4eee-8907-405a69b5f7d5-kube-api-access-s5hql\") pod \"node-ca-tpt4n\" (UID: \"d06fba7e-f02e-4eee-8907-405a69b5f7d5\") " pod="openshift-image-registry/node-ca-tpt4n" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.882415 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d06fba7e-f02e-4eee-8907-405a69b5f7d5-host\") pod \"node-ca-tpt4n\" (UID: \"d06fba7e-f02e-4eee-8907-405a69b5f7d5\") " pod="openshift-image-registry/node-ca-tpt4n" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.884817 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.929540 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.963843 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:06Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.982868 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d06fba7e-f02e-4eee-8907-405a69b5f7d5-serviceca\") pod \"node-ca-tpt4n\" (UID: \"d06fba7e-f02e-4eee-8907-405a69b5f7d5\") " pod="openshift-image-registry/node-ca-tpt4n" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.982928 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5hql\" (UniqueName: \"kubernetes.io/projected/d06fba7e-f02e-4eee-8907-405a69b5f7d5-kube-api-access-s5hql\") pod \"node-ca-tpt4n\" (UID: \"d06fba7e-f02e-4eee-8907-405a69b5f7d5\") " pod="openshift-image-registry/node-ca-tpt4n" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.982957 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d06fba7e-f02e-4eee-8907-405a69b5f7d5-host\") pod \"node-ca-tpt4n\" (UID: \"d06fba7e-f02e-4eee-8907-405a69b5f7d5\") " pod="openshift-image-registry/node-ca-tpt4n" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.983062 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d06fba7e-f02e-4eee-8907-405a69b5f7d5-host\") pod \"node-ca-tpt4n\" (UID: \"d06fba7e-f02e-4eee-8907-405a69b5f7d5\") " pod="openshift-image-registry/node-ca-tpt4n" Dec 03 19:30:06 crc kubenswrapper[4916]: I1203 19:30:06.984955 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/d06fba7e-f02e-4eee-8907-405a69b5f7d5-serviceca\") pod \"node-ca-tpt4n\" (UID: \"d06fba7e-f02e-4eee-8907-405a69b5f7d5\") " pod="openshift-image-registry/node-ca-tpt4n" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.005406 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.038245 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5hql\" (UniqueName: \"kubernetes.io/projected/d06fba7e-f02e-4eee-8907-405a69b5f7d5-kube-api-access-s5hql\") pod \"node-ca-tpt4n\" (UID: \"d06fba7e-f02e-4eee-8907-405a69b5f7d5\") " pod="openshift-image-registry/node-ca-tpt4n" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.069278 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.113377 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z 
is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.131993 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-tpt4n" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.145621 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: W1203 19:30:07.151961 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd06fba7e_f02e_4eee_8907_405a69b5f7d5.slice/crio-05492fab4c44e4216565a8a720515e0af53eb3e743db055d0ae670cec4249125 WatchSource:0}: Error finding container 05492fab4c44e4216565a8a720515e0af53eb3e743db055d0ae670cec4249125: Status 404 returned error can't find the container with id 05492fab4c44e4216565a8a720515e0af53eb3e743db055d0ae670cec4249125 Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.187084 4916 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.222521 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.265056 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.304497 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.348712 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.384513 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.633156 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.642168 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" 
event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228"} Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.642234 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532"} Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.642253 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4"} Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.645326 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" event={"ID":"2a1fc2b9-c813-42d4-badd-f1f81e57b667","Type":"ContainerStarted","Data":"8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01"} Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.646690 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16"} Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.648167 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-tpt4n" event={"ID":"d06fba7e-f02e-4eee-8907-405a69b5f7d5","Type":"ContainerStarted","Data":"b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74"} Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.648201 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-tpt4n" event={"ID":"d06fba7e-f02e-4eee-8907-405a69b5f7d5","Type":"ContainerStarted","Data":"05492fab4c44e4216565a8a720515e0af53eb3e743db055d0ae670cec4249125"} Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.650709 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.655819 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.658685 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.674861 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc 
kubenswrapper[4916]: I1203 19:30:07.687579 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.700014 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.712730 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.728986 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z 
is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.742955 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.759245 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.768761 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.805826 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.848114 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.887291 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.925414 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:07 crc kubenswrapper[4916]: I1203 19:30:07.969330 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:07Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.004997 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.047772 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.082051 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.094433 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.094476 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.094654 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered 
Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.094656 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.094687 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.094700 4916 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.094671 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.094764 4916 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.094779 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:12.094751957 +0000 UTC m=+28.057562233 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.094800 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:12.094791488 +0000 UTC m=+28.057601764 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.128768 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.167177 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.195236 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.195360 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.195376 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:30:12.195360973 +0000 UTC m=+28.158171239 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.195424 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.195465 4916 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.195471 4916 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.195512 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:12.195501376 +0000 UTC m=+28.158311642 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.195535 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:12.195519347 +0000 UTC m=+28.158329613 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.208012 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26
702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastStat
e\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.245410 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.285661 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.327392 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.373832 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.405284 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.445301 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.477243 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.477291 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.477441 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.477452 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.477648 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:08 crc kubenswrapper[4916]: E1203 19:30:08.477776 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.488092 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.660514 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42"} Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.660631 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa"} Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.660658 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53"} Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.663001 4916 generic.go:334] "Generic (PLEG): container finished" podID="2a1fc2b9-c813-42d4-badd-f1f81e57b667" containerID="8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01" exitCode=0 Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.663067 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" event={"ID":"2a1fc2b9-c813-42d4-badd-f1f81e57b667","Type":"ContainerDied","Data":"8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01"} Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.695078 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.707081 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.720667 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.733933 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.748970 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.765367 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.800534 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171
e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.818145 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.876812 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.900068 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.929254 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:08 crc kubenswrapper[4916]: I1203 19:30:08.966291 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\"
:\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.012508 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.046090 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.670882 4916 generic.go:334] "Generic (PLEG): container finished" podID="2a1fc2b9-c813-42d4-badd-f1f81e57b667" containerID="bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78" exitCode=0 Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.670934 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" event={"ID":"2a1fc2b9-c813-42d4-badd-f1f81e57b667","Type":"ContainerDied","Data":"bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78"} Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.685022 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.705817 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.723431 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.740773 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.756022 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.773503 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: 
I1203 19:30:09.786583 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.812848 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.824749 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.844136 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\
"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.860337 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.875516 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.888183 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:09 crc kubenswrapper[4916]: I1203 19:30:09.906857 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.004552 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.013355 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.014554 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.031643 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn
-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.043771 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.056807 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.072318 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.117205 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.134263 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.143913 4916 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.145402 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.145448 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.145460 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.145577 4916 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.153435 4916 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.153548 4916 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.154537 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.154610 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.154623 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 
19:30:10.154641 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.154653 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.159063 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba
46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd
\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: E1203 19:30:10.166647 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9
ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.169880 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.169908 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.169917 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.169931 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.169941 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.173074 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: E1203 19:30:10.180108 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.183118 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.183156 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.183165 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.183181 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.183191 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.186203 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: E1203 19:30:10.193914 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.197281 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.197309 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.197318 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.197333 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.197345 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.197399 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: E1203 19:30:10.208408 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9
ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.212038 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.212084 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.212094 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.212108 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.212118 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.212143 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: E1203 19:30:10.223420 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9
ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: E1203 19:30:10.223549 4916 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.225029 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.225051 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.225059 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.225072 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.225080 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.231639 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.253857 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.272886 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.287189 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.302907 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.327358 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.327398 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.327409 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.327423 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.327434 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.344357 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.387403 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.422807 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.429531 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.429593 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.429612 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.429633 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.429644 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.473156 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.477072 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.477140 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.477161 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:10 crc kubenswrapper[4916]: E1203 19:30:10.477246 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:10 crc kubenswrapper[4916]: E1203 19:30:10.477446 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:10 crc kubenswrapper[4916]: E1203 19:30:10.477622 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.506922 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.532644 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.532701 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.532720 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.532781 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc 
kubenswrapper[4916]: I1203 19:30:10.532803 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.560830 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0
fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314
731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.593139 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.628701 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.635913 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.635981 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.636008 4916 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.636038 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.636062 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.671018 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.678100 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" event={"ID":"2a1fc2b9-c813-42d4-badd-f1f81e57b667","Type":"ContainerStarted","Data":"28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09"} Dec 03 19:30:10 crc kubenswrapper[4916]: E1203 19:30:10.702510 4916 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.727719 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multu
s/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.738360 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.738401 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.738416 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.738435 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.738449 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.770123 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.809116 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.841240 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.841288 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.841303 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.841320 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.841332 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.856127 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.943868 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.943908 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.943916 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.943931 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:10 crc kubenswrapper[4916]: I1203 19:30:10.943939 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:10Z","lastTransitionTime":"2025-12-03T19:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.046476 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.046598 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.046621 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.046646 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.046664 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:11Z","lastTransitionTime":"2025-12-03T19:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.149054 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.149119 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.149136 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.149156 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.149177 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:11Z","lastTransitionTime":"2025-12-03T19:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.252041 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.252113 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.252127 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.252149 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.252164 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:11Z","lastTransitionTime":"2025-12-03T19:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.355224 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.355291 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.355316 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.355352 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.355396 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:11Z","lastTransitionTime":"2025-12-03T19:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.458777 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.458844 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.458861 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.458887 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.458904 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:11Z","lastTransitionTime":"2025-12-03T19:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.561729 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.561789 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.561810 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.561834 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.561852 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:11Z","lastTransitionTime":"2025-12-03T19:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.664155 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.664196 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.664207 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.664223 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.664233 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:11Z","lastTransitionTime":"2025-12-03T19:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.682843 4916 generic.go:334] "Generic (PLEG): container finished" podID="2a1fc2b9-c813-42d4-badd-f1f81e57b667" containerID="28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09" exitCode=0 Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.682918 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" event={"ID":"2a1fc2b9-c813-42d4-badd-f1f81e57b667","Type":"ContainerDied","Data":"28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09"} Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.689270 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0"} Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.699024 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.714008 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.730313 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.745069 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.760614 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.765842 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.765873 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.765882 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.765895 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.765904 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:11Z","lastTransitionTime":"2025-12-03T19:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.770665 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.789364 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.803491 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.832642 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.845271 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.863217 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171
e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.869093 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.869128 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.869140 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.869156 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.869168 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:11Z","lastTransitionTime":"2025-12-03T19:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.877224 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.892503 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.903411 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.915018 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.971551 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.971611 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.971626 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.971646 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:11 crc kubenswrapper[4916]: I1203 19:30:11.971661 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:11Z","lastTransitionTime":"2025-12-03T19:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.074119 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.074179 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.074196 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.074280 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.074298 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:12Z","lastTransitionTime":"2025-12-03T19:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.143453 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.143521 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.143706 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.143739 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.143753 4916 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.143816 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:20.143797657 +0000 UTC m=+36.106607933 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.143822 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.143853 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.143873 4916 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.143947 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:20.143925431 +0000 UTC m=+36.106735727 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.158110 4916 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.177193 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.177229 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.177238 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.177252 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.177261 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:12Z","lastTransitionTime":"2025-12-03T19:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.244934 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.245086 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.245118 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:30:20.245096271 +0000 UTC m=+36.207906547 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.245171 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.245212 4916 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.245276 4916 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.245301 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:20.245280276 +0000 UTC m=+36.208090582 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.245322 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:20.245312507 +0000 UTC m=+36.208122783 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.279374 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.279425 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.279436 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.279452 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.279463 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:12Z","lastTransitionTime":"2025-12-03T19:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.382164 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.382242 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.382265 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.382294 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.382311 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:12Z","lastTransitionTime":"2025-12-03T19:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.477898 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.478019 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.478393 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.478483 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.478846 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:12 crc kubenswrapper[4916]: E1203 19:30:12.479200 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.485428 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.485485 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.485507 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.485533 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.485553 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:12Z","lastTransitionTime":"2025-12-03T19:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.588516 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.588846 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.589038 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.589234 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.589368 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:12Z","lastTransitionTime":"2025-12-03T19:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.692264 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.692330 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.692347 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.692372 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.692392 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:12Z","lastTransitionTime":"2025-12-03T19:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.795219 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.795251 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.795258 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.795270 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.795279 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:12Z","lastTransitionTime":"2025-12-03T19:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.898189 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.898247 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.898263 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.898285 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:12 crc kubenswrapper[4916]: I1203 19:30:12.898302 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:12Z","lastTransitionTime":"2025-12-03T19:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.000954 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.000998 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.001014 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.001037 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.001055 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:13Z","lastTransitionTime":"2025-12-03T19:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.103736 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.103796 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.103812 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.103837 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.103855 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:13Z","lastTransitionTime":"2025-12-03T19:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.206151 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.206200 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.206210 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.206226 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.206239 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:13Z","lastTransitionTime":"2025-12-03T19:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.308803 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.308852 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.308868 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.308886 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.308898 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:13Z","lastTransitionTime":"2025-12-03T19:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.411518 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.411589 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.411602 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.411626 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.411638 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:13Z","lastTransitionTime":"2025-12-03T19:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.514605 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.514660 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.514673 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.514690 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.514704 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:13Z","lastTransitionTime":"2025-12-03T19:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.610279 4916 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.617962 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.618036 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.618051 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.618068 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.618079 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:13Z","lastTransitionTime":"2025-12-03T19:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.700466 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" event={"ID":"2a1fc2b9-c813-42d4-badd-f1f81e57b667","Type":"ContainerStarted","Data":"51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838"} Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.704751 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b"} Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.705087 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.716297 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.719850 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.719903 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:13 crc kubenswrapper[4916]: 
I1203 19:30:13.719921 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.719982 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.720000 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:13Z","lastTransitionTime":"2025-12-03T19:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.737736 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.758326 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.776205 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.792219 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.803962 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.823109 4916 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.823157 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.823175 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.823200 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.823217 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:13Z","lastTransitionTime":"2025-12-03T19:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.825512 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.840158 4916 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.878285 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\
\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.895618 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.914614 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.925839 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.925883 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.925894 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.925910 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.925922 4916 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:13Z","lastTransitionTime":"2025-12-03T19:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.928121 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.939499 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.940124 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.951026 4916 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.959325 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.978594 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:13 crc kubenswrapper[4916]: I1203 19:30:13.999554 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:13Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.012182 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.025862 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/
var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09
\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.028203 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.028231 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.028239 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:14 crc 
kubenswrapper[4916]: I1203 19:30:14.028269 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.028280 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:14Z","lastTransitionTime":"2025-12-03T19:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.037636 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.053921 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.065673 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.078582 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: 
I1203 19:30:14.087829 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.104015 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.113172 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.130606 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.130640 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.130651 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.130666 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.130677 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:14Z","lastTransitionTime":"2025-12-03T19:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.130746 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.143577 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.160779 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.175337 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.194543 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.233646 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.233693 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.233704 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.233724 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.233737 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:14Z","lastTransitionTime":"2025-12-03T19:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.336914 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.336985 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.337052 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.337084 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.337108 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:14Z","lastTransitionTime":"2025-12-03T19:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.440352 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.440405 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.440422 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.440441 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.440455 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:14Z","lastTransitionTime":"2025-12-03T19:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.482184 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:14 crc kubenswrapper[4916]: E1203 19:30:14.482342 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.482421 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:14 crc kubenswrapper[4916]: E1203 19:30:14.482505 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.483104 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:14 crc kubenswrapper[4916]: E1203 19:30:14.483949 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.507899 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.527893 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.544480 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.544560 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.544616 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.544648 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.544669 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:14Z","lastTransitionTime":"2025-12-03T19:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.546233 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.566035 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.581959 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.618099 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\
"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-sock
et\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mou
ntPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.635235 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.647333 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.647399 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.647426 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.647455 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.647478 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:14Z","lastTransitionTime":"2025-12-03T19:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.668174 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.690935 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.712930 4916 generic.go:334] "Generic (PLEG): container finished" podID="2a1fc2b9-c813-42d4-badd-f1f81e57b667" containerID="51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838" exitCode=0 Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.714708 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" event={"ID":"2a1fc2b9-c813-42d4-badd-f1f81e57b667","Type":"ContainerDied","Data":"51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838"} Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.714760 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.714855 4916 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.717392 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.734200 4916 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.750202 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.750245 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.750255 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.750272 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.750283 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:14Z","lastTransitionTime":"2025-12-03T19:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.752030 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.756175 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.773259 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-co
ntroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.789959 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.816046 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.833876 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.847078 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.855299 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.855341 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.855351 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.855368 4916 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.855380 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:14Z","lastTransitionTime":"2025-12-03T19:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.870228 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51b
f6d363587090f6379381b81b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.885207 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.906723 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171
e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.924437 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.938730 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.953429 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.961604 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.961743 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.961786 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.961819 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.961847 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:14Z","lastTransitionTime":"2025-12-03T19:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.969267 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:14 crc kubenswrapper[4916]: I1203 19:30:14.986346 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.002589 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.021449 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.035369 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.046878 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.063181 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.064838 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.064861 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.064886 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.064900 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.064908 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:15Z","lastTransitionTime":"2025-12-03T19:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.167928 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.168005 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.168027 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.168056 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.168078 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:15Z","lastTransitionTime":"2025-12-03T19:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.271024 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.271094 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.271114 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.271144 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.271166 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:15Z","lastTransitionTime":"2025-12-03T19:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.374031 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.374074 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.374083 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.374099 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.374111 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:15Z","lastTransitionTime":"2025-12-03T19:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.477202 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.477267 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.477278 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.477301 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.477310 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:15Z","lastTransitionTime":"2025-12-03T19:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.579546 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.579604 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.579617 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.579634 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.579644 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:15Z","lastTransitionTime":"2025-12-03T19:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.683108 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.683161 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.683177 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.683198 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.683212 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:15Z","lastTransitionTime":"2025-12-03T19:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.719283 4916 generic.go:334] "Generic (PLEG): container finished" podID="2a1fc2b9-c813-42d4-badd-f1f81e57b667" containerID="0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14" exitCode=0 Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.719682 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" event={"ID":"2a1fc2b9-c813-42d4-badd-f1f81e57b667","Type":"ContainerDied","Data":"0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14"} Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.742321 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.755963 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.773936 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e
49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.785477 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.785517 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.785575 4916 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.785595 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.785607 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:15Z","lastTransitionTime":"2025-12-03T19:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.786428 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 
19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.797998 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.810061 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.826722 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.842133 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 
19:30:15.855373 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.868192 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.883512 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.887703 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.887734 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.887745 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.887759 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.887769 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:15Z","lastTransitionTime":"2025-12-03T19:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.905720 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.916814 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.931706 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.942257 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:15Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.994431 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.995051 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.995069 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.995085 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:15 crc kubenswrapper[4916]: I1203 19:30:15.995096 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:15Z","lastTransitionTime":"2025-12-03T19:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.098557 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.098677 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.098701 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.098731 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.098754 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:16Z","lastTransitionTime":"2025-12-03T19:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.206676 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.206753 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.206770 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.206796 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.206814 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:16Z","lastTransitionTime":"2025-12-03T19:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.310010 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.310074 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.310094 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.310120 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.310140 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:16Z","lastTransitionTime":"2025-12-03T19:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.413073 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.413125 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.413138 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.413155 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.413166 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:16Z","lastTransitionTime":"2025-12-03T19:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.478106 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.478164 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.478243 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:16 crc kubenswrapper[4916]: E1203 19:30:16.478434 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:16 crc kubenswrapper[4916]: E1203 19:30:16.478507 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:16 crc kubenswrapper[4916]: E1203 19:30:16.478641 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.504718 4916 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.515772 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.515833 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.515852 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.515881 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.515899 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:16Z","lastTransitionTime":"2025-12-03T19:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.619032 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.619083 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.619097 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.619118 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.619133 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:16Z","lastTransitionTime":"2025-12-03T19:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.722555 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.722636 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.722653 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.722679 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.722697 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:16Z","lastTransitionTime":"2025-12-03T19:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.726139 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/0.log" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.731124 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b" exitCode=1 Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.731251 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.732264 4916 scope.go:117] "RemoveContainer" containerID="f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.736487 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" event={"ID":"2a1fc2b9-c813-42d4-badd-f1f81e57b667","Type":"ContainerStarted","Data":"2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.748747 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.762703 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.778760 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.794908 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"message\\\":\\\"flector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 19:30:15.813979 6162 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:15.813995 6162 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:15.814037 6162 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:15.814043 6162 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:15.814046 6162 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:15.814057 6162 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 19:30:15.814059 6162 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:15.814067 6162 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:15.814069 6162 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:15.814075 6162 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1203 19:30:15.814082 6162 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:15.814125 6162 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 19:30:15.814140 6162 factory.go:656] Stopping watch factory\\\\nI1203 19:30:15.814153 6162 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 19:30:15.814160 6162 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.812066 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.825929 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.825966 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.825978 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.825997 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.826010 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:16Z","lastTransitionTime":"2025-12-03T19:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.849402 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171
e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.863127 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.878887 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.896357 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.911521 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.926870 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.928338 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.928390 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.928399 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.928413 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.928428 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:16Z","lastTransitionTime":"2025-12-03T19:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.943185 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.964508 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://510
37188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.980123 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:16 crc kubenswrapper[4916]: I1203 19:30:16.993534 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:16Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.006162 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.020671 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.045390 4916 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.045443 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.045468 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.045497 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.045520 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:17Z","lastTransitionTime":"2025-12-03T19:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.060179 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e4079209
6b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12
-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.090394 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\"
:\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.104585 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.117543 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.130478 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.143920 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: 
I1203 19:30:17.148037 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.148076 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.148087 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.148105 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.148117 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:17Z","lastTransitionTime":"2025-12-03T19:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.154173 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.172325 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57
917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"message\\\":\\\"flector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 19:30:15.813979 6162 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:15.813995 6162 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:15.814037 6162 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:15.814043 6162 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:15.814046 6162 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:15.814057 6162 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 19:30:15.814059 6162 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:15.814067 6162 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:15.814069 6162 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:15.814075 6162 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1203 19:30:15.814082 6162 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:15.814125 6162 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 19:30:15.814140 6162 factory.go:656] Stopping watch factory\\\\nI1203 19:30:15.814153 6162 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 19:30:15.814160 6162 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.182451 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.200419 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc4782
74c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.218202 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.236134 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.250850 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.250920 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.250944 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.251017 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.251040 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:17Z","lastTransitionTime":"2025-12-03T19:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.256254 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.286610 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269
019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"
,\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.347835 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz"] Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.348234 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.351506 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.352889 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.353202 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.353238 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.353250 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.353268 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.353281 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:17Z","lastTransitionTime":"2025-12-03T19:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.372097 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.390342 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.402661 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf2dw\" (UniqueName: \"kubernetes.io/projected/5504feb0-62e6-45d1-8ca0-e7541ec0269f-kube-api-access-xf2dw\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.402736 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5504feb0-62e6-45d1-8ca0-e7541ec0269f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.402771 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5504feb0-62e6-45d1-8ca0-e7541ec0269f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.402795 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5504feb0-62e6-45d1-8ca0-e7541ec0269f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.406239 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.426391 4916 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.441476 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.455659 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.455707 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.455723 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.455742 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.455759 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:17Z","lastTransitionTime":"2025-12-03T19:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.470552 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"message\\\":\\\"flector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 19:30:15.813979 6162 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:15.813995 6162 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:15.814037 6162 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:15.814043 6162 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:15.814046 6162 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:15.814057 6162 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 19:30:15.814059 6162 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:15.814067 6162 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:15.814069 6162 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:15.814075 6162 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1203 19:30:15.814082 6162 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:15.814125 6162 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 19:30:15.814140 6162 factory.go:656] Stopping watch factory\\\\nI1203 19:30:15.814153 6162 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 19:30:15.814160 6162 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.485855 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.503945 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf2dw\" (UniqueName: \"kubernetes.io/projected/5504feb0-62e6-45d1-8ca0-e7541ec0269f-kube-api-access-xf2dw\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.504009 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5504feb0-62e6-45d1-8ca0-e7541ec0269f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.504031 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5504feb0-62e6-45d1-8ca0-e7541ec0269f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.504050 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5504feb0-62e6-45d1-8ca0-e7541ec0269f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.504721 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5504feb0-62e6-45d1-8ca0-e7541ec0269f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.505045 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5504feb0-62e6-45d1-8ca0-e7541ec0269f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.519019 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171
e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.519110 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5504feb0-62e6-45d1-8ca0-e7541ec0269f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.526207 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf2dw\" (UniqueName: \"kubernetes.io/projected/5504feb0-62e6-45d1-8ca0-e7541ec0269f-kube-api-access-xf2dw\") pod \"ovnkube-control-plane-749d76644c-9mxpz\" (UID: \"5504feb0-62e6-45d1-8ca0-e7541ec0269f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.540030 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.558269 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.558886 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.558927 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.558936 4916 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.558951 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.558961 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:17Z","lastTransitionTime":"2025-12-03T19:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.571015 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.589396 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.602688 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.623112 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.638724 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.655729 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe
105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.661759 4916 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.661799 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.661812 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.661828 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.661847 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:17Z","lastTransitionTime":"2025-12-03T19:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.669222 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz"
Dec 03 19:30:17 crc kubenswrapper[4916]: W1203 19:30:17.687171 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5504feb0_62e6_45d1_8ca0_e7541ec0269f.slice/crio-3cd704f09355c624bc1542686af7bc88b5a6f982691a7c5053034d2fb0501051 WatchSource:0}: Error finding container 3cd704f09355c624bc1542686af7bc88b5a6f982691a7c5053034d2fb0501051: Status 404 returned error can't find the container with id 3cd704f09355c624bc1542686af7bc88b5a6f982691a7c5053034d2fb0501051
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.742696 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/0.log"
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.746921 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d"}
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.747449 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr"
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.750595 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" event={"ID":"5504feb0-62e6-45d1-8ca0-e7541ec0269f","Type":"ContainerStarted","Data":"3cd704f09355c624bc1542686af7bc88b5a6f982691a7c5053034d2fb0501051"}
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.766198 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.766231 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.766241 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.766259 4916 kubelet_node_status.go:724]
"Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.766271 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:17Z","lastTransitionTime":"2025-12-03T19:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.770399 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.779882 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.795912 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"message\\\":\\\"flector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 19:30:15.813979 6162 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:15.813995 6162 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:15.814037 6162 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:15.814043 6162 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:15.814046 6162 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:15.814057 6162 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 19:30:15.814059 6162 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:15.814067 6162 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:15.814069 6162 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:15.814075 6162 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1203 19:30:15.814082 6162 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:15.814125 6162 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 19:30:15.814140 6162 factory.go:656] Stopping watch factory\\\\nI1203 19:30:15.814153 6162 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 19:30:15.814160 6162 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.806895 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.825050 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171
e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.838703 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347
c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.850769 4916 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.861271 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.868215 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.868268 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.868283 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.868301 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.868314 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:17Z","lastTransitionTime":"2025-12-03T19:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.875097 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.889090 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.899898 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.912823 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.927415 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe
105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.941550 4916 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.953738 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.968265 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:17Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.969940 4916 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.969961 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.969969 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.969982 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:17 crc kubenswrapper[4916]: I1203 19:30:17.969990 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:17Z","lastTransitionTime":"2025-12-03T19:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.072289 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.072343 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.072354 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.072369 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.072379 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:18Z","lastTransitionTime":"2025-12-03T19:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.175280 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.175317 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.175328 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.175344 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.175356 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:18Z","lastTransitionTime":"2025-12-03T19:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.277470 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.277750 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.277847 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.277954 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.278220 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:18Z","lastTransitionTime":"2025-12-03T19:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.381027 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.381094 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.381114 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.381143 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.381162 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:18Z","lastTransitionTime":"2025-12-03T19:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.477496 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.477547 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.477610 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:18 crc kubenswrapper[4916]: E1203 19:30:18.477746 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:18 crc kubenswrapper[4916]: E1203 19:30:18.477878 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:18 crc kubenswrapper[4916]: E1203 19:30:18.477982 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.483351 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.483405 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.483424 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.483469 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.483489 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:18Z","lastTransitionTime":"2025-12-03T19:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.586740 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.586804 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.586816 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.586833 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.586846 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:18Z","lastTransitionTime":"2025-12-03T19:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.689160 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.689196 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.689206 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.689222 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.689234 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:18Z","lastTransitionTime":"2025-12-03T19:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.755512 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/1.log" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.756445 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/0.log" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.760300 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d" exitCode=1 Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.760376 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.760419 4916 scope.go:117] "RemoveContainer" containerID="f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.761440 4916 scope.go:117] "RemoveContainer" containerID="2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d" Dec 03 19:30:18 crc kubenswrapper[4916]: E1203 19:30:18.761716 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.764693 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" event={"ID":"5504feb0-62e6-45d1-8ca0-e7541ec0269f","Type":"ContainerStarted","Data":"00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.764724 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" event={"ID":"5504feb0-62e6-45d1-8ca0-e7541ec0269f","Type":"ContainerStarted","Data":"1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.775711 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.794158 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers 
with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.794824 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.794883 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.794919 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.794954 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.794979 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:18Z","lastTransitionTime":"2025-12-03T19:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.811608 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.826591 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.827597 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-kbxgw"] Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.828464 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:18 crc kubenswrapper[4916]: E1203 19:30:18.828609 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.841138 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.862922 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"message\\\":\\\"flector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 19:30:15.813979 6162 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:15.813995 6162 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:15.814037 6162 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:15.814043 6162 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:15.814046 6162 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:15.814057 6162 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 19:30:15.814059 6162 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:15.814067 6162 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:15.814069 6162 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:15.814075 6162 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1203 19:30:15.814082 6162 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:15.814125 6162 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 19:30:15.814140 6162 factory.go:656] Stopping watch factory\\\\nI1203 19:30:15.814153 6162 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 19:30:15.814160 6162 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"\\\\nI1203 19:30:18.424323 6356 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 19:30:18.424375 6356 handler.go:190] Sending *v1.Namespace event handler 
5 for removal\\\\nI1203 19:30:18.424423 6356 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 19:30:18.424457 6356 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 19:30:18.424536 6356 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:18.424579 6356 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:18.424609 6356 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:18.424626 6356 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:18.426716 6356 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:18.426755 6356 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:18.426786 6356 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:18.426797 6356 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:18.426831 6356 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 19:30:18.426847 6356 factory.go:656] Stopping watch factory\\\\nI1203 19:30:18.426848 6356 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:18.426866 6356 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.875874 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.897520 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.897581 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.897593 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.897613 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.897626 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:18Z","lastTransitionTime":"2025-12-03T19:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.897895 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.913921 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.933800 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.950912 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.970591 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:18 crc kubenswrapper[4916]: I1203 19:30:18.985912 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.002786 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:18Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.002825 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.002968 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.002983 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.002999 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.003011 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:19Z","lastTransitionTime":"2025-12-03T19:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.018786 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.038066 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.051857 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.065428 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.078876 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.093483 4916 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.102675 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65df2\" (UniqueName: \"kubernetes.io/projected/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-kube-api-access-65df2\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.102736 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.104307 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.104989 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.105018 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.105027 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.105043 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.105053 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:19Z","lastTransitionTime":"2025-12-03T19:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.121626 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f1d41ecbe87c00013b8483bc57f32a61a3c9f51bf6d363587090f6379381b81b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"message\\\":\\\"flector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 19:30:15.813979 6162 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:15.813995 6162 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:15.814037 6162 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:15.814043 6162 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:15.814046 6162 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:15.814057 6162 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 19:30:15.814059 6162 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:15.814067 6162 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:15.814069 6162 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:15.814075 6162 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1203 19:30:15.814082 6162 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:15.814125 6162 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 19:30:15.814140 6162 factory.go:656] Stopping watch factory\\\\nI1203 19:30:15.814153 6162 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 19:30:15.814160 6162 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:13Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"\\\\nI1203 19:30:18.424323 6356 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 19:30:18.424375 6356 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 19:30:18.424423 6356 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 19:30:18.424457 6356 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 19:30:18.424536 6356 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:18.424579 6356 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:18.424609 6356 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:18.424626 6356 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:18.426716 6356 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:18.426755 6356 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:18.426786 6356 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:18.426797 6356 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:18.426831 6356 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 19:30:18.426847 6356 factory.go:656] Stopping watch factory\\\\nI1203 19:30:18.426848 6356 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:18.426866 6356 ovnkube.go:599] Stopped 
ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.130718 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\
\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.140717 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.165512 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir
\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12
-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.185088 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.199890 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.203240 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.203284 4916 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65df2\" (UniqueName: \"kubernetes.io/projected/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-kube-api-access-65df2\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:19 crc kubenswrapper[4916]: E1203 19:30:19.203373 4916 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:19 crc kubenswrapper[4916]: E1203 19:30:19.203451 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs podName:9ae5584e-d1d9-4aa9-955a-41bdf15f0461 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:19.703427338 +0000 UTC m=+35.666237634 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs") pod "network-metrics-daemon-kbxgw" (UID: "9ae5584e-d1d9-4aa9-955a-41bdf15f0461") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.207355 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.207426 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.207445 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.207483 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.207531 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:19Z","lastTransitionTime":"2025-12-03T19:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.214194 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.227861 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65df2\" (UniqueName: \"kubernetes.io/projected/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-kube-api-access-65df2\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.242738 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.257557 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.273673 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.293461 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.309331 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.310299 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.310353 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.310370 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.310392 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.310409 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:19Z","lastTransitionTime":"2025-12-03T19:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.413891 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.413962 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.413986 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.414015 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.414041 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:19Z","lastTransitionTime":"2025-12-03T19:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.516544 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.516625 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.516646 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.516669 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.516686 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:19Z","lastTransitionTime":"2025-12-03T19:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.619650 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.619720 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.619742 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.619770 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.619792 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:19Z","lastTransitionTime":"2025-12-03T19:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.710048 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:19 crc kubenswrapper[4916]: E1203 19:30:19.710241 4916 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:19 crc kubenswrapper[4916]: E1203 19:30:19.710325 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs podName:9ae5584e-d1d9-4aa9-955a-41bdf15f0461 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:20.710302038 +0000 UTC m=+36.673112344 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs") pod "network-metrics-daemon-kbxgw" (UID: "9ae5584e-d1d9-4aa9-955a-41bdf15f0461") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.721720 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.721763 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.721773 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.721792 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.721803 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:19Z","lastTransitionTime":"2025-12-03T19:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.770482 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/1.log" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.777248 4916 scope.go:117] "RemoveContainer" containerID="2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d" Dec 03 19:30:19 crc kubenswrapper[4916]: E1203 19:30:19.777667 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.799717 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.811785 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.824945 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.825000 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.825012 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.825034 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.825048 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:19Z","lastTransitionTime":"2025-12-03T19:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.835091 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.851047 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.869545 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.887318 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.902094 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.916500 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.928203 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.928255 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.928271 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.928290 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.928304 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:19Z","lastTransitionTime":"2025-12-03T19:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.946536 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7d
d33310d408a4c3f255a41a6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"\\\\nI1203 19:30:18.424323 6356 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 19:30:18.424375 6356 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 19:30:18.424423 6356 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 19:30:18.424457 6356 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 19:30:18.424536 6356 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:18.424579 6356 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:18.424609 6356 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:18.424626 6356 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:18.426716 6356 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:18.426755 6356 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:18.426786 6356 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:18.426797 6356 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:18.426831 6356 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 19:30:18.426847 6356 factory.go:656] Stopping watch factory\\\\nI1203 19:30:18.426848 6356 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:18.426866 6356 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.959307 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.979271 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:19 crc kubenswrapper[4916]: I1203 19:30:19.994085 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:19Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.014982 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:20Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.031621 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.031693 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.031714 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.031745 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.031769 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.034001 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:20Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.053503 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:20Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.084122 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"
name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri
-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:20Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.104278 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:20Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.135763 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.135825 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.135838 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.135868 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.136095 4916 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.214489 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.214539 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.214687 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.214709 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.214721 4916 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.214772 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:36.214754294 +0000 UTC m=+52.177564560 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.214778 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.214819 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.214843 4916 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.214911 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:36.214887298 +0000 UTC m=+52.177697604 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.230355 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.230394 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.230404 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.230418 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.230427 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.241911 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:20Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.247406 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.247440 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.247453 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.247471 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.247481 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.269994 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:20Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.274753 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.274805 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.274819 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.274845 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.274864 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.294483 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:20Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.310915 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.310956 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.310971 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.310993 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.311008 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.315884 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.316009 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.316047 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.316204 4916 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.316261 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:36.316247473 +0000 UTC m=+52.279057739 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.316256 4916 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.316352 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:30:36.316310065 +0000 UTC m=+52.279120371 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.316437 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:36.316391237 +0000 UTC m=+52.279201543 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.340084 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.340139 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.340157 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.340184 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.340203 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.362286 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:20Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.362463 4916 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.364362 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.364403 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.364416 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.364436 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.364456 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.467499 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.467605 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.467625 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.467654 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.467675 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.477876 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.477959 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.478023 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.478095 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.478135 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.478220 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.478331 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.478446 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.570742 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.570824 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.570843 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.570873 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.570898 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
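
Each "NetworkReady=false reason:NetworkPluginNotReady" record above, and every pod sync skipped because of it, traces back to one condition: the container runtime found no CNI network configuration in /etc/kubernetes/cni/net.d/, and the node stays NotReady until the network operator writes a config file there. A rough stand-alone sketch of that directory probe (the real check is performed by the runtime through libcni; the extension list here is an assumption based on what libcni commonly accepts):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one CNI network
// configuration file, using commonly accepted extensions.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if !ok {
		fmt.Println("no CNI configuration file found; network plugin not ready")
		os.Exit(1)
	}
	fmt.Println("CNI configuration present")
}
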
Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.675183 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.675220 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.675231 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.675249 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.675261 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.719624 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.719803 4916 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:20 crc kubenswrapper[4916]: E1203 19:30:20.719873 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs podName:9ae5584e-d1d9-4aa9-955a-41bdf15f0461 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:22.719850768 +0000 UTC m=+38.682661044 (durationBeforeRetry 2s). 
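
The mount failure above ("object \"openshift-multus\"/\"metrics-daemon-secret\" not registered") does not necessarily mean the Secret is absent from the API server; it means the kubelet's local object cache has not registered it yet, so the metrics-certs volume cannot be populated and the operation is requeued (durationBeforeRetry 2s). A quick way to check whether the Secret exists server-side is a direct client-go lookup; a sketch assuming a kubeconfig at a hypothetical path:

package main

import (
	"context"
	"fmt"
	"os"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Hypothetical kubeconfig location; adjust for the cluster at hand.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/tmp/kubeconfig")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	// The Secret referenced by the failing metrics-certs volume mount.
	s, err := client.CoreV1().Secrets("openshift-multus").Get(
		context.Background(), "metrics-daemon-secret", metav1.GetOptions{})
	if err != nil {
		fmt.Fprintf(os.Stderr, "secret lookup failed: %v\n", err)
		os.Exit(1)
	}
	fmt.Printf("secret %s/%s exists with %d keys\n", s.Namespace, s.Name, len(s.Data))
}
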
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs") pod "network-metrics-daemon-kbxgw" (UID: "9ae5584e-d1d9-4aa9-955a-41bdf15f0461") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.777879 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.778330 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.778537 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.778798 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.779004 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.881864 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.881940 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.881965 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.881996 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.882017 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.985086 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.985160 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.985182 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.985214 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:20 crc kubenswrapper[4916]: I1203 19:30:20.985235 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:20Z","lastTransitionTime":"2025-12-03T19:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.087891 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.087971 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.087989 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.088014 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.088030 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:21Z","lastTransitionTime":"2025-12-03T19:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.190579 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.190624 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.190636 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.190655 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.190668 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:21Z","lastTransitionTime":"2025-12-03T19:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.293648 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.293707 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.293720 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.293738 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.293751 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:21Z","lastTransitionTime":"2025-12-03T19:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.396429 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.396479 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.396490 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.396507 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.396524 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:21Z","lastTransitionTime":"2025-12-03T19:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.498727 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.499120 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.499275 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.499431 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.499615 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:21Z","lastTransitionTime":"2025-12-03T19:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
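
For reference, the condition object that setters.go logs on every "Node became not ready" line has the shape of a core/v1 NodeCondition: lastHeartbeatTime advances on every status sync, while lastTransitionTime only moves when Status actually flips. A dependency-free sketch that decodes one of the repeated conditions verbatim (the local struct mirrors the upstream type; it is not imported from it):

package main

import (
	"encoding/json"
	"fmt"
)

// nodeCondition mirrors the fields of the core/v1 NodeCondition objects
// logged above; defined locally to keep the sketch dependency-free.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// One of the repeated "Node became not ready" conditions, verbatim.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:21Z","lastTransitionTime":"2025-12-03T19:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
}
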
Has your network provider started?"} Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.602310 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.602393 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.602416 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.602447 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.602470 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:21Z","lastTransitionTime":"2025-12-03T19:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.706252 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.706314 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.706334 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.706360 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.706376 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:21Z","lastTransitionTime":"2025-12-03T19:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.808699 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.808761 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.808777 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.808803 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.808820 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:21Z","lastTransitionTime":"2025-12-03T19:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.911999 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.912106 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.912127 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.912153 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:21 crc kubenswrapper[4916]: I1203 19:30:21.912172 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:21Z","lastTransitionTime":"2025-12-03T19:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.015316 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.015387 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.015412 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.015445 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.015470 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:22Z","lastTransitionTime":"2025-12-03T19:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.118070 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.118118 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.118131 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.118154 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.118171 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:22Z","lastTransitionTime":"2025-12-03T19:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.220798 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.220850 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.220861 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.220879 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.220890 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:22Z","lastTransitionTime":"2025-12-03T19:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.323050 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.323110 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.323128 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.323151 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.323169 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:22Z","lastTransitionTime":"2025-12-03T19:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.426198 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.426243 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.426253 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.426270 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.426282 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:22Z","lastTransitionTime":"2025-12-03T19:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.477325 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.477376 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.477433 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:22 crc kubenswrapper[4916]: E1203 19:30:22.477557 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:22 crc kubenswrapper[4916]: E1203 19:30:22.477783 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.477812 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:22 crc kubenswrapper[4916]: E1203 19:30:22.477943 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:22 crc kubenswrapper[4916]: E1203 19:30:22.478700 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.529216 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.529265 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.529277 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.529294 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.529307 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:22Z","lastTransitionTime":"2025-12-03T19:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.631850 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.631904 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.631917 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.631933 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.631944 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:22Z","lastTransitionTime":"2025-12-03T19:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.734245 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.734308 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.734325 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.734347 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.734360 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:22Z","lastTransitionTime":"2025-12-03T19:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.736001 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:22 crc kubenswrapper[4916]: E1203 19:30:22.736217 4916 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:22 crc kubenswrapper[4916]: E1203 19:30:22.736315 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs podName:9ae5584e-d1d9-4aa9-955a-41bdf15f0461 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:26.736289307 +0000 UTC m=+42.699099773 (durationBeforeRetry 4s). 
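
Note the requeue delay doubling between the two metrics-certs failures: durationBeforeRetry 2s at 19:30:20 (m=+38.68), then 4s at 19:30:22 (m=+42.70). That is the kubelet's per-operation exponential backoff in nestedpendingoperations; a minimal stand-alone sketch of the same double-and-cap policy (the initial and maximum values are illustrative, not taken from kubelet source):

package main

import (
	"fmt"
	"time"
)

// backoff doubles the retry delay on every consecutive failure, up to a
// cap. The initial/max values below are illustrative only.
type backoff struct {
	initial time.Duration
	max     time.Duration
	current time.Duration
}

func (b *backoff) next() time.Duration {
	if b.current == 0 {
		b.current = b.initial
	} else {
		b.current *= 2
		if b.current > b.max {
			b.current = b.max
		}
	}
	return b.current
}

// reset would be called after a successful attempt, clearing the delay.
func (b *backoff) reset() { b.current = 0 }

func main() {
	b := &backoff{initial: 2 * time.Second, max: 2 * time.Minute}
	// Mirrors the observed sequence: 2s, then 4s, then 8s, ...
	for i := 0; i < 4; i++ {
		fmt.Printf("retry %d: durationBeforeRetry %s\n", i+1, b.next())
	}
}
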
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs") pod "network-metrics-daemon-kbxgw" (UID: "9ae5584e-d1d9-4aa9-955a-41bdf15f0461") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.837995 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.838084 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.838102 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.838132 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.838189 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:22Z","lastTransitionTime":"2025-12-03T19:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.941661 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.941964 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.942098 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.942344 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:22 crc kubenswrapper[4916]: I1203 19:30:22.942483 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:22Z","lastTransitionTime":"2025-12-03T19:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.045602 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.045641 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.045649 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.045662 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.045670 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:23Z","lastTransitionTime":"2025-12-03T19:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.148457 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.148514 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.148531 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.148557 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.148614 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:23Z","lastTransitionTime":"2025-12-03T19:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.252235 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.252309 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.252330 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.252363 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.252385 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:23Z","lastTransitionTime":"2025-12-03T19:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.355447 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.355513 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.355530 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.355555 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.355610 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:23Z","lastTransitionTime":"2025-12-03T19:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.457701 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.457769 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.457782 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.457799 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.457810 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:23Z","lastTransitionTime":"2025-12-03T19:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.561381 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.561442 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.561460 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.561482 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.561502 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:23Z","lastTransitionTime":"2025-12-03T19:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.665646 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.665694 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.665710 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.665736 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.665753 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:23Z","lastTransitionTime":"2025-12-03T19:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.768373 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.768440 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.768458 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.768484 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.768503 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:23Z","lastTransitionTime":"2025-12-03T19:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.871540 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.871644 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.871661 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.871685 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.871702 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:23Z","lastTransitionTime":"2025-12-03T19:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.974621 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.974694 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.974717 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.974750 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:23 crc kubenswrapper[4916]: I1203 19:30:23.974773 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:23Z","lastTransitionTime":"2025-12-03T19:30:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.078335 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.078432 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.078456 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.078486 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.078509 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:24Z","lastTransitionTime":"2025-12-03T19:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.182541 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.182679 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.182697 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.182726 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.182745 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:24Z","lastTransitionTime":"2025-12-03T19:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.285872 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.285917 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.285928 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.285946 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.285958 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:24Z","lastTransitionTime":"2025-12-03T19:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.389171 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.389245 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.389263 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.389288 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.389307 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:24Z","lastTransitionTime":"2025-12-03T19:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.477855 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:24 crc kubenswrapper[4916]: E1203 19:30:24.478061 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.478157 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.478256 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.478190 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:24 crc kubenswrapper[4916]: E1203 19:30:24.478444 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:24 crc kubenswrapper[4916]: E1203 19:30:24.478660 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:24 crc kubenswrapper[4916]: E1203 19:30:24.478780 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.492383 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.493054 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.493073 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.493114 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.493135 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:24Z","lastTransitionTime":"2025-12-03T19:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
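
The block above is the kubelet's steady state while the network plugin is still coming up: every node heartbeat repeats NodeNotReady because no network config exists yet under /etc/kubernetes/cni/net.d/, and pod sandboxes cannot be created until one appears. A minimal Go sketch of that kind of directory check, assuming only the path quoted in the log (the helper name findCNIConfigs and the extension list are illustrative, not the kubelet's actual implementation):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// findCNIConfigs is a hypothetical helper that mirrors the check behind
// "no CNI configuration file in /etc/kubernetes/cni/net.d/": it lists
// candidate network configs in confDir.
func findCNIConfigs(confDir string) ([]string, error) {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return nil, err
	}
	var configs []string
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			configs = append(configs, filepath.Join(confDir, e.Name()))
		}
	}
	return configs, nil
}

func main() {
	configs, err := findCNIConfigs("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("error reading conf dir:", err)
		return
	}
	if len(configs) == 0 {
		// The state the kubelet reports above: the network plugin
		// (here OVN-Kubernetes) has not written its config yet.
		fmt.Println("no CNI configuration file found; network not ready")
		return
	}
	fmt.Println("found CNI configs:", configs)
}

Once the network plugin writes a config into this directory, the NetworkReady condition should flip and the "No sandbox for pod can be found" retries above can proceed.
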
Has your network provider started?"} Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.493415 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.506095 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.524025 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.540339 4916 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.553394 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.586597 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"\\\\nI1203 19:30:18.424323 6356 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 19:30:18.424375 6356 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 19:30:18.424423 6356 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 19:30:18.424457 6356 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 19:30:18.424536 6356 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:18.424579 6356 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:18.424609 6356 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:18.424626 6356 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:18.426716 6356 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:18.426755 6356 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:18.426786 6356 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:18.426797 6356 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:18.426831 6356 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 19:30:18.426847 6356 factory.go:656] Stopping watch factory\\\\nI1203 19:30:18.426848 6356 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:18.426866 6356 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.596489 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.596599 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.596628 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.596662 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.596683 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:24Z","lastTransitionTime":"2025-12-03T19:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
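
Every one of these "Failed to update status for pod" entries fails the same way: the patch has to pass through the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24T17:21:41Z, months before the node's current clock time of 2025-12-03. A small Go sketch for confirming what certificate an endpoint is actually serving (the address is taken from the log; verification is skipped on purpose so the expired chain can be inspected rather than rejected the way the kubelet's client rejects it):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Address taken from the failing webhook call in the log above.
	addr := "127.0.0.1:9743"

	// InsecureSkipVerify is deliberate: verification is exactly what
	// fails here, and we want to read the expired certificate anyway.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}
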
Has your network provider started?"} Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.602763 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.631771 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171
e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.653656 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347
c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.673356 4916 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.690013 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.699259 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.699329 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.699353 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.699381 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.699402 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:24Z","lastTransitionTime":"2025-12-03T19:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.705198 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.719844 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.732330 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/op
enshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.746640 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.763884 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.780418 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.801492 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.801544 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.801554 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.801597 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.801611 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:24Z","lastTransitionTime":"2025-12-03T19:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.905284 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.905376 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.905397 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.905431 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:24 crc kubenswrapper[4916]: I1203 19:30:24.905451 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:24Z","lastTransitionTime":"2025-12-03T19:30:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.008837 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.008894 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.008905 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.008928 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.008944 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:25Z","lastTransitionTime":"2025-12-03T19:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.113145 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.113205 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.113225 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.113253 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.113271 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:25Z","lastTransitionTime":"2025-12-03T19:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.217139 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.217198 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.217216 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.217245 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.217264 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:25Z","lastTransitionTime":"2025-12-03T19:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.320118 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.320186 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.320209 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.320287 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.320309 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:25Z","lastTransitionTime":"2025-12-03T19:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.424271 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.424371 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.424398 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.424433 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.424471 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:25Z","lastTransitionTime":"2025-12-03T19:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.526704 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.526760 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.526777 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.526800 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.526816 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:25Z","lastTransitionTime":"2025-12-03T19:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.629088 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.629122 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.629131 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.629147 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.629157 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:25Z","lastTransitionTime":"2025-12-03T19:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.732314 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.732359 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.732371 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.732386 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.732396 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:25Z","lastTransitionTime":"2025-12-03T19:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.834384 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.834457 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.834472 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.834487 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.834498 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:25Z","lastTransitionTime":"2025-12-03T19:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.937804 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.937878 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.937899 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.937938 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:25 crc kubenswrapper[4916]: I1203 19:30:25.937994 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:25Z","lastTransitionTime":"2025-12-03T19:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.041422 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.041498 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.041521 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.041614 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.041641 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:26Z","lastTransitionTime":"2025-12-03T19:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.143917 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.143956 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.143968 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.143991 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.144005 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:26Z","lastTransitionTime":"2025-12-03T19:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.246480 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.246528 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.246541 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.246558 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.246595 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:26Z","lastTransitionTime":"2025-12-03T19:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.348910 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.348940 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.348966 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.348981 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.348989 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:26Z","lastTransitionTime":"2025-12-03T19:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.451321 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.451378 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.451394 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.451416 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.451434 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:26Z","lastTransitionTime":"2025-12-03T19:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.477167 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.477167 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.477234 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.477191 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:26 crc kubenswrapper[4916]: E1203 19:30:26.477340 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:26 crc kubenswrapper[4916]: E1203 19:30:26.477406 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:26 crc kubenswrapper[4916]: E1203 19:30:26.477527 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:26 crc kubenswrapper[4916]: E1203 19:30:26.477606 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.554603 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.554671 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.554688 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.554715 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.554731 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:26Z","lastTransitionTime":"2025-12-03T19:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.658500 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.658614 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.658630 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.658674 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.658688 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:26Z","lastTransitionTime":"2025-12-03T19:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.762126 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.762263 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.762287 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.762318 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.762340 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:26Z","lastTransitionTime":"2025-12-03T19:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.779622 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:26 crc kubenswrapper[4916]: E1203 19:30:26.779791 4916 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:26 crc kubenswrapper[4916]: E1203 19:30:26.779858 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs podName:9ae5584e-d1d9-4aa9-955a-41bdf15f0461 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:34.779838505 +0000 UTC m=+50.742648781 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs") pod "network-metrics-daemon-kbxgw" (UID: "9ae5584e-d1d9-4aa9-955a-41bdf15f0461") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.866345 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.866394 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.866411 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.866435 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.866454 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:26Z","lastTransitionTime":"2025-12-03T19:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.970376 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.970469 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.970488 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.970517 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:26 crc kubenswrapper[4916]: I1203 19:30:26.970536 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:26Z","lastTransitionTime":"2025-12-03T19:30:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.074393 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.074994 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.075008 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.075028 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.075040 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:27Z","lastTransitionTime":"2025-12-03T19:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.177955 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.177992 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.178002 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.178015 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.178025 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:27Z","lastTransitionTime":"2025-12-03T19:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.280898 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.280981 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.281010 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.281043 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.281062 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:27Z","lastTransitionTime":"2025-12-03T19:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.384076 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.384139 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.384157 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.384181 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.384198 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:27Z","lastTransitionTime":"2025-12-03T19:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.487503 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.487639 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.487658 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.487680 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.487693 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:27Z","lastTransitionTime":"2025-12-03T19:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.591198 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.591411 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.591428 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.591455 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.591474 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:27Z","lastTransitionTime":"2025-12-03T19:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.694799 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.694873 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.694890 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.694916 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.694933 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:27Z","lastTransitionTime":"2025-12-03T19:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.801072 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.801136 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.801155 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.801182 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.801205 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:27Z","lastTransitionTime":"2025-12-03T19:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.905186 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.905279 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.905299 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.905328 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:27 crc kubenswrapper[4916]: I1203 19:30:27.905348 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:27Z","lastTransitionTime":"2025-12-03T19:30:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.008875 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.008916 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.008927 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.008944 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.008954 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:28Z","lastTransitionTime":"2025-12-03T19:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.112195 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.112305 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.112327 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.112861 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.113095 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:28Z","lastTransitionTime":"2025-12-03T19:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.216759 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.216823 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.216842 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.216869 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.216888 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:28Z","lastTransitionTime":"2025-12-03T19:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.320661 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.320727 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.320746 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.320774 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.320793 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:28Z","lastTransitionTime":"2025-12-03T19:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.424492 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.424622 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.424643 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.424673 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.424694 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:28Z","lastTransitionTime":"2025-12-03T19:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.478249 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.478294 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.478363 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.478421 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:28 crc kubenswrapper[4916]: E1203 19:30:28.478670 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:28 crc kubenswrapper[4916]: E1203 19:30:28.478847 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:28 crc kubenswrapper[4916]: E1203 19:30:28.478938 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:28 crc kubenswrapper[4916]: E1203 19:30:28.479227 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.528136 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.528214 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.528228 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.528258 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.528273 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:28Z","lastTransitionTime":"2025-12-03T19:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.631463 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.631525 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.631543 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.631586 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.631600 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:28Z","lastTransitionTime":"2025-12-03T19:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.734323 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.734374 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.734387 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.734404 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.734420 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:28Z","lastTransitionTime":"2025-12-03T19:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.837977 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.838081 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.838109 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.838150 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.838177 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:28Z","lastTransitionTime":"2025-12-03T19:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.940875 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.940926 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.940938 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.940959 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:28 crc kubenswrapper[4916]: I1203 19:30:28.940973 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:28Z","lastTransitionTime":"2025-12-03T19:30:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.043562 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.043640 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.043657 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.043680 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.043697 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:29Z","lastTransitionTime":"2025-12-03T19:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.146167 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.146212 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.146224 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.146241 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.146252 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:29Z","lastTransitionTime":"2025-12-03T19:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.248406 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.248457 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.248469 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.248487 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.248502 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:29Z","lastTransitionTime":"2025-12-03T19:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.350525 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.350597 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.350609 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.350627 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.350639 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:29Z","lastTransitionTime":"2025-12-03T19:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.453706 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.453759 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.453771 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.453795 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.453808 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:29Z","lastTransitionTime":"2025-12-03T19:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.557458 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.557543 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.557561 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.557628 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.557645 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:29Z","lastTransitionTime":"2025-12-03T19:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.660872 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.660958 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.660982 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.661018 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.661041 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:29Z","lastTransitionTime":"2025-12-03T19:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.764670 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.764732 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.764748 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.764773 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.764790 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:29Z","lastTransitionTime":"2025-12-03T19:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.867401 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.867512 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.867532 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.867560 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.867612 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:29Z","lastTransitionTime":"2025-12-03T19:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.971334 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.971399 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.971418 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.971443 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:29 crc kubenswrapper[4916]: I1203 19:30:29.971460 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:29Z","lastTransitionTime":"2025-12-03T19:30:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.075085 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.075194 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.075214 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.075237 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.075255 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.179096 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.179167 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.179184 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.179211 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.179228 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.282761 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.282845 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.282870 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.282902 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.282926 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.386130 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.386191 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.386203 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.386220 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.386258 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.426942 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.427016 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.427029 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.427055 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.427069 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: E1203 19:30:30.448375 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:30Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.455204 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.455259 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.455280 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.455309 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.455329 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: E1203 19:30:30.475282 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:30Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.477766 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.477924 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.478221 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:30 crc kubenswrapper[4916]: E1203 19:30:30.478188 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.478546 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:30 crc kubenswrapper[4916]: E1203 19:30:30.478785 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:30 crc kubenswrapper[4916]: E1203 19:30:30.478942 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:30 crc kubenswrapper[4916]: E1203 19:30:30.479196 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.481001 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.481049 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.481067 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.481101 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.481160 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: E1203 19:30:30.530741 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:30Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.538753 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.539223 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.539319 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.539412 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.539523 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: E1203 19:30:30.559981 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:30Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.565534 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.565827 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.565901 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.565976 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.566044 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: E1203 19:30:30.581214 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:30Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:30 crc kubenswrapper[4916]: E1203 19:30:30.581822 4916 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.583600 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.583646 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.583659 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.583679 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.583693 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.687255 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.687319 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.687351 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.687383 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.687404 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.791090 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.791166 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.791188 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.791218 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.791243 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.894086 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.894177 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.894198 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.894250 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.894273 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.997417 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.997471 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.997490 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.997514 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:30 crc kubenswrapper[4916]: I1203 19:30:30.997532 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:30Z","lastTransitionTime":"2025-12-03T19:30:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.099904 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.099990 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.100005 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.100026 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.100042 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:31Z","lastTransitionTime":"2025-12-03T19:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.203292 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.203371 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.203394 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.203422 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.203443 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:31Z","lastTransitionTime":"2025-12-03T19:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.307006 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.307097 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.307117 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.307152 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.307179 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:31Z","lastTransitionTime":"2025-12-03T19:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.409824 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.409884 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.409901 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.409921 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.409936 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:31Z","lastTransitionTime":"2025-12-03T19:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.513708 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.513759 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.513771 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.513791 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.513803 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:31Z","lastTransitionTime":"2025-12-03T19:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.616677 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.616740 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.616766 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.616794 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.616812 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:31Z","lastTransitionTime":"2025-12-03T19:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.719729 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.719808 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.719835 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.719861 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.719878 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:31Z","lastTransitionTime":"2025-12-03T19:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.828885 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.828962 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.829013 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.829039 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.829059 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:31Z","lastTransitionTime":"2025-12-03T19:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.932738 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.933046 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.933167 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.933297 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:31 crc kubenswrapper[4916]: I1203 19:30:31.933420 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:31Z","lastTransitionTime":"2025-12-03T19:30:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.036824 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.036879 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.036897 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.036919 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.036938 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:32Z","lastTransitionTime":"2025-12-03T19:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.140025 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.140087 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.140146 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.140180 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.140200 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:32Z","lastTransitionTime":"2025-12-03T19:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.243177 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.243243 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.243260 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.243283 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.243300 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:32Z","lastTransitionTime":"2025-12-03T19:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.346661 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.346722 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.346731 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.346745 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.346754 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:32Z","lastTransitionTime":"2025-12-03T19:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.449156 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.449194 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.449206 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.449223 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.449233 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:32Z","lastTransitionTime":"2025-12-03T19:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.477244 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.477261 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:32 crc kubenswrapper[4916]: E1203 19:30:32.477385 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:32 crc kubenswrapper[4916]: E1203 19:30:32.477442 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.477279 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:32 crc kubenswrapper[4916]: E1203 19:30:32.477521 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.478062 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:32 crc kubenswrapper[4916]: E1203 19:30:32.478364 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.552077 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.552114 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.552123 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.552136 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.552145 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:32Z","lastTransitionTime":"2025-12-03T19:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.654509 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.654590 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.654599 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.654612 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.654625 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:32Z","lastTransitionTime":"2025-12-03T19:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.757647 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.757748 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.757778 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.757795 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.757810 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:32Z","lastTransitionTime":"2025-12-03T19:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.861214 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.861315 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.861335 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.861359 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.861376 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:32Z","lastTransitionTime":"2025-12-03T19:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.964742 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.964810 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.964827 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.964852 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:32 crc kubenswrapper[4916]: I1203 19:30:32.964873 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:32Z","lastTransitionTime":"2025-12-03T19:30:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.068331 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.068411 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.068434 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.068467 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.068492 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:33Z","lastTransitionTime":"2025-12-03T19:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.171162 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.171200 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.171210 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.171224 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.171233 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:33Z","lastTransitionTime":"2025-12-03T19:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.273792 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.273831 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.273841 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.273854 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.273865 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:33Z","lastTransitionTime":"2025-12-03T19:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.376952 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.377013 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.377050 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.377078 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.377103 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:33Z","lastTransitionTime":"2025-12-03T19:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.479484 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.479539 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.479555 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.479614 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.479631 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:33Z","lastTransitionTime":"2025-12-03T19:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.582443 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.582525 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.582549 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.582609 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.582628 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:33Z","lastTransitionTime":"2025-12-03T19:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.686193 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.686259 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.686283 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.686314 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.686337 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:33Z","lastTransitionTime":"2025-12-03T19:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.789591 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.789658 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.789679 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.789705 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.789723 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:33Z","lastTransitionTime":"2025-12-03T19:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.892516 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.892644 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.892676 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.892709 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.892734 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:33Z","lastTransitionTime":"2025-12-03T19:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.996819 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.996887 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.996906 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.996937 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:33 crc kubenswrapper[4916]: I1203 19:30:33.996956 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:33Z","lastTransitionTime":"2025-12-03T19:30:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.100121 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.100189 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.100208 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.100232 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.100251 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:34Z","lastTransitionTime":"2025-12-03T19:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.204367 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.204435 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.204451 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.204477 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.204494 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:34Z","lastTransitionTime":"2025-12-03T19:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.307087 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.307144 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.307153 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.307168 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.307178 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:34Z","lastTransitionTime":"2025-12-03T19:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.409708 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.409769 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.409789 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.409818 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.409839 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:34Z","lastTransitionTime":"2025-12-03T19:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.477304 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.477510 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.477624 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.477643 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:34 crc kubenswrapper[4916]: E1203 19:30:34.478678 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:34 crc kubenswrapper[4916]: E1203 19:30:34.478651 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:34 crc kubenswrapper[4916]: E1203 19:30:34.478883 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.478981 4916 scope.go:117] "RemoveContainer" containerID="2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d" Dec 03 19:30:34 crc kubenswrapper[4916]: E1203 19:30:34.479049 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.501556 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.512789 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.512846 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.512867 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.512895 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.512917 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:34Z","lastTransitionTime":"2025-12-03T19:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.520346 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.534695 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.552447 4916 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.564554 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.584175 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"\\\\nI1203 19:30:18.424323 6356 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 19:30:18.424375 6356 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 19:30:18.424423 6356 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 19:30:18.424457 6356 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 19:30:18.424536 6356 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:18.424579 6356 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:18.424609 6356 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:18.424626 6356 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:18.426716 6356 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:18.426755 6356 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:18.426786 6356 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:18.426797 6356 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:18.426831 6356 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 19:30:18.426847 6356 factory.go:656] Stopping watch factory\\\\nI1203 19:30:18.426848 6356 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:18.426866 6356 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.598458 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.611477 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.615136 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.615177 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.615190 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.615208 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.615219 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:34Z","lastTransitionTime":"2025-12-03T19:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.636941 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.654811 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.667685 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.680219 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.696530 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.710200 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.717321 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.717359 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.717369 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.717384 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.717396 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:34Z","lastTransitionTime":"2025-12-03T19:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.722430 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.744400 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.757880 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.780422 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:34 crc kubenswrapper[4916]: E1203 19:30:34.780557 4916 secret.go:188] Couldn't get secret 
openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:34 crc kubenswrapper[4916]: E1203 19:30:34.780646 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs podName:9ae5584e-d1d9-4aa9-955a-41bdf15f0461 nodeName:}" failed. No retries permitted until 2025-12-03 19:30:50.780628059 +0000 UTC m=+66.743438325 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs") pod "network-metrics-daemon-kbxgw" (UID: "9ae5584e-d1d9-4aa9-955a-41bdf15f0461") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.819296 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.819336 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.819354 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.819371 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.819381 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:34Z","lastTransitionTime":"2025-12-03T19:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.840668 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/1.log" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.843237 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46"} Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.843868 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.853540 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.855137 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-confi
g-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.873688 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.875854 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.897047 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.919962 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7d
b49ca08057bc118953938a46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"\\\\nI1203 19:30:18.424323 6356 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 19:30:18.424375 6356 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 19:30:18.424423 6356 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 19:30:18.424457 6356 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 19:30:18.424536 6356 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:18.424579 6356 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:18.424609 6356 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:18.424626 6356 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:18.426716 6356 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:18.426755 6356 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:18.426786 6356 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:18.426797 6356 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:18.426831 6356 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 19:30:18.426847 6356 factory.go:656] Stopping watch factory\\\\nI1203 19:30:18.426848 6356 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:18.426866 6356 ovnkube.go:599] Stopped 
ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\
\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.921131 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.921170 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.921183 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.921198 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.921209 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:34Z","lastTransitionTime":"2025-12-03T19:30:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.939539 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.956630 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.967116 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.981046 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:34 crc kubenswrapper[4916]: I1203 19:30:34.995667 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:34Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.006720 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 
19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.023354 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.023385 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.023392 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.023407 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.023415 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:35Z","lastTransitionTime":"2025-12-03T19:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.033150 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.048465 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.064100 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.076193 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.092544 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.105305 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.118357 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe
105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.125520 4916 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.125546 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.125554 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.125601 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.125612 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:35Z","lastTransitionTime":"2025-12-03T19:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.136980 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\
\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.149144 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.159820 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.172025 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.185090 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.195678 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.208395 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manag
er-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.220881 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.228391 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.228478 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.228504 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.228536 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.228560 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:35Z","lastTransitionTime":"2025-12-03T19:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.235450 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.245189 4916 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.255510 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.266833 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.280132 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.294142 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.308276 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: 
I1203 19:30:35.317056 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.331241 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.331266 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.331275 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.331289 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.331299 4916 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:35Z","lastTransitionTime":"2025-12-03T19:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.339475 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"\\\\nI1203 19:30:18.424323 6356 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 19:30:18.424375 6356 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 19:30:18.424423 6356 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 19:30:18.424457 6356 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 19:30:18.424536 6356 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:18.424579 6356 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:18.424609 6356 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:18.424626 6356 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:18.426716 6356 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:18.426755 6356 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:18.426786 6356 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:18.426797 6356 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:18.426831 6356 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 19:30:18.426847 6356 factory.go:656] Stopping watch factory\\\\nI1203 19:30:18.426848 6356 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:18.426866 6356 ovnkube.go:599] Stopped 
ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\
\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.349518 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.434756 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.434823 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.434837 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.434859 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.434872 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:35Z","lastTransitionTime":"2025-12-03T19:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.538401 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.538458 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.538474 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.538498 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.538515 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:35Z","lastTransitionTime":"2025-12-03T19:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.641670 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.642155 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.642181 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.642212 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.642237 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:35Z","lastTransitionTime":"2025-12-03T19:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.746282 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.746732 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.746979 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.747194 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.747407 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:35Z","lastTransitionTime":"2025-12-03T19:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.848076 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/2.log" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.848594 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/1.log" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.850076 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.850257 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.850320 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.850388 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.850453 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:35Z","lastTransitionTime":"2025-12-03T19:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.851721 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46" exitCode=1 Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.852763 4916 scope.go:117] "RemoveContainer" containerID="a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46" Dec 03 19:30:35 crc kubenswrapper[4916]: E1203 19:30:35.852881 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.853003 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46"} Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.853036 4916 scope.go:117] "RemoveContainer" containerID="2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.873774 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.885639 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.904345 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2328d631716858a814c7607c1a5639cc1504ca7dd33310d408a4c3f255a41a6d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"\\\\nI1203 19:30:18.424323 6356 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 19:30:18.424375 6356 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 19:30:18.424423 6356 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 19:30:18.424457 6356 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 19:30:18.424536 6356 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 19:30:18.424579 6356 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 19:30:18.424609 6356 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 19:30:18.424626 6356 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 19:30:18.426716 6356 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 19:30:18.426755 6356 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 19:30:18.426786 6356 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1203 19:30:18.426797 6356 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1203 19:30:18.426831 6356 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1203 19:30:18.426847 6356 factory.go:656] Stopping watch factory\\\\nI1203 19:30:18.426848 6356 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1203 19:30:18.426866 6356 ovnkube.go:599] Stopped ovnkube\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:17Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:35Z\\\",\\\"message\\\":\\\"6554 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298738 6554 ovn.go:134] Ensuring zone local for Pod 
openshift-dns/node-resolver-fcbx4 in node crc\\\\nI1203 19:30:35.298742 6554 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-fcbx4 after 0 failed attempt(s)\\\\nI1203 19:30:35.298747 6554 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298757 6554 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nF1203 19:30:35.298759 6554 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet v\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b1
7b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.916899 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.931228 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 
19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.953418 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.953918 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.954042 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.954122 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.954198 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:35Z","lastTransitionTime":"2025-12-03T19:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.957453 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.973206 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.986233 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:35 crc kubenswrapper[4916]: I1203 19:30:35.998083 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:35Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.012499 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.027224 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.038679 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.055968 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.057262 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.057305 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:36 crc 
kubenswrapper[4916]: I1203 19:30:36.057315 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.057331 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.057344 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:36Z","lastTransitionTime":"2025-12-03T19:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.070541 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.088086 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.104400 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.128433 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.143965 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.159830 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.159874 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.159883 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.159898 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.159911 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:36Z","lastTransitionTime":"2025-12-03T19:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.262063 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.262129 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.262143 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.262171 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.262189 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:36Z","lastTransitionTime":"2025-12-03T19:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.298506 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.298627 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.298857 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.298922 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.298868 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.298949 4916 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.298972 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.298996 4916 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.299079 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 19:31:08.29904147 +0000 UTC m=+84.261851776 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.299118 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 19:31:08.299102491 +0000 UTC m=+84.261912797 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.365249 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.365348 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.365370 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.365413 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.365437 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:36Z","lastTransitionTime":"2025-12-03T19:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.400253 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.400493 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:31:08.400455797 +0000 UTC m=+84.363266083 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.400557 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.400687 4916 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.400721 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.400734 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:31:08.400725884 +0000 UTC m=+84.363536410 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.400890 4916 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.401006 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:31:08.40097681 +0000 UTC m=+84.363787116 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.469104 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.469168 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.469182 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.469206 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.469220 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:36Z","lastTransitionTime":"2025-12-03T19:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.477770 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.477795 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.477781 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.477920 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.477957 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.478098 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.478181 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.478391 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.571780 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.571831 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.571842 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.571859 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.571872 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:36Z","lastTransitionTime":"2025-12-03T19:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.675045 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.675090 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.675103 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.675142 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.675153 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:36Z","lastTransitionTime":"2025-12-03T19:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.777379 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.777453 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.777476 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.777505 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.777526 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:36Z","lastTransitionTime":"2025-12-03T19:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.859998 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/2.log" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.863920 4916 scope.go:117] "RemoveContainer" containerID="a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46" Dec 03 19:30:36 crc kubenswrapper[4916]: E1203 19:30:36.864111 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.880381 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.880428 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.880438 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.880453 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.880463 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:36Z","lastTransitionTime":"2025-12-03T19:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.884018 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.899291 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.914772 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.930881 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.947282 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.961017 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.977304 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manag
er-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.983163 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.983230 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.983248 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.983272 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.983289 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:36Z","lastTransitionTime":"2025-12-03T19:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:36 crc kubenswrapper[4916]: I1203 19:30:36.992678 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:36Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.014202 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:37Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.027918 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:37Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.041496 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:37Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.061634 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:37Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.084615 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:37Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.087602 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.087645 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.087685 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.087703 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.087715 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:37Z","lastTransitionTime":"2025-12-03T19:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.101455 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:37Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.121205 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:37Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.132222 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:37Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.157670 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:35Z\\\",\\\"message\\\":\\\"6554 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298738 6554 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-fcbx4 in node crc\\\\nI1203 19:30:35.298742 6554 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-fcbx4 after 0 failed attempt(s)\\\\nI1203 19:30:35.298747 6554 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298757 6554 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nF1203 19:30:35.298759 6554 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet v\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:37Z is after 2025-08-24T17:21:41Z"
Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.171071 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:37Z is after 2025-08-24T17:21:41Z"
Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.190036 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.190117 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.190129 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.190147 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.190159 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:37Z","lastTransitionTime":"2025-12-03T19:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.293172 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.293251 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.293276 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.293306 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:37 crc kubenswrapper[4916]: I1203 19:30:37.293328 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:37Z","lastTransitionTime":"2025-12-03T19:30:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:38 crc kubenswrapper[4916]: I1203 19:30:38.014458 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:38 crc kubenswrapper[4916]: I1203 19:30:38.014597 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:38 crc kubenswrapper[4916]: I1203 19:30:38.014624 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:38 crc kubenswrapper[4916]: I1203 19:30:38.014657 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:38 crc kubenswrapper[4916]: I1203 19:30:38.014679 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:38Z","lastTransitionTime":"2025-12-03T19:30:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:38 crc kubenswrapper[4916]: I1203 19:30:38.477672 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 19:30:38 crc kubenswrapper[4916]: I1203 19:30:38.477726 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 19:30:38 crc kubenswrapper[4916]: E1203 19:30:38.477888 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 19:30:38 crc kubenswrapper[4916]: I1203 19:30:38.477713 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 19:30:38 crc kubenswrapper[4916]: I1203 19:30:38.478738 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw"
Dec 03 19:30:38 crc kubenswrapper[4916]: E1203 19:30:38.478897 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 19:30:38 crc kubenswrapper[4916]: E1203 19:30:38.479102 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461"
Dec 03 19:30:38 crc kubenswrapper[4916]: E1203 19:30:38.478720 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 19:30:39 crc kubenswrapper[4916]: I1203 19:30:39.044489 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:39 crc kubenswrapper[4916]: I1203 19:30:39.044524 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:39 crc kubenswrapper[4916]: I1203 19:30:39.044535 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:39 crc kubenswrapper[4916]: I1203 19:30:39.044552 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:39 crc kubenswrapper[4916]: I1203 19:30:39.044580 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:39Z","lastTransitionTime":"2025-12-03T19:30:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.479426 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 19:30:40 crc kubenswrapper[4916]: E1203 19:30:40.479535 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.479770 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw"
Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.479831 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.479913 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 19:30:40 crc kubenswrapper[4916]: E1203 19:30:40.479846 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461"
Dec 03 19:30:40 crc kubenswrapper[4916]: E1203 19:30:40.480110 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 19:30:40 crc kubenswrapper[4916]: E1203 19:30:40.480188 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.814865 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.815046 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.815074 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.815105 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.815127 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:40Z","lastTransitionTime":"2025-12-03T19:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:40 crc kubenswrapper[4916]: E1203 19:30:40.840867 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:40Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.845597 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.845653 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.845670 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.845693 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.845711 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:40Z","lastTransitionTime":"2025-12-03T19:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:40 crc kubenswrapper[4916]: E1203 19:30:40.865556 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:40Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.870693 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.870747 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.870755 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.870770 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.870780 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:40Z","lastTransitionTime":"2025-12-03T19:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:40 crc kubenswrapper[4916]: E1203 19:30:40.889157 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:40Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.895179 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.895235 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.895246 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.895263 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.895276 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:40Z","lastTransitionTime":"2025-12-03T19:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:40 crc kubenswrapper[4916]: E1203 19:30:40.915092 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:40Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.919891 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.919958 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.919975 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.920000 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.920016 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:40Z","lastTransitionTime":"2025-12-03T19:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:40 crc kubenswrapper[4916]: E1203 19:30:40.938920 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:40Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:40 crc kubenswrapper[4916]: E1203 19:30:40.939044 4916 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.941250 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.941321 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.941347 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.941380 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:40 crc kubenswrapper[4916]: I1203 19:30:40.941402 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:40Z","lastTransitionTime":"2025-12-03T19:30:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.045089 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.045163 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.045188 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.045212 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.045229 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:41Z","lastTransitionTime":"2025-12-03T19:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.148835 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.148876 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.148889 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.148906 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.148918 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:41Z","lastTransitionTime":"2025-12-03T19:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.252323 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.252379 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.252402 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.252430 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.252453 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:41Z","lastTransitionTime":"2025-12-03T19:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.355142 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.355230 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.355251 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.355307 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.355328 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:41Z","lastTransitionTime":"2025-12-03T19:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.457912 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.458027 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.458051 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.458082 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.458106 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:41Z","lastTransitionTime":"2025-12-03T19:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.560189 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.560221 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.560231 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.560246 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.560256 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:41Z","lastTransitionTime":"2025-12-03T19:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.663486 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.663554 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.663619 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.663649 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.663673 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:41Z","lastTransitionTime":"2025-12-03T19:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.766513 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.766579 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.766592 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.766612 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.766622 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:41Z","lastTransitionTime":"2025-12-03T19:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.870182 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.870244 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.870262 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.870286 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.870305 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:41Z","lastTransitionTime":"2025-12-03T19:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.974166 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.974259 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.974279 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.974316 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:41 crc kubenswrapper[4916]: I1203 19:30:41.974340 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:41Z","lastTransitionTime":"2025-12-03T19:30:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.077435 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.077491 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.077509 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.077538 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.077556 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:42Z","lastTransitionTime":"2025-12-03T19:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.180202 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.180284 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.180307 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.180337 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.180358 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:42Z","lastTransitionTime":"2025-12-03T19:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.283764 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.283837 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.283850 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.283875 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.283892 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:42Z","lastTransitionTime":"2025-12-03T19:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.386041 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.386106 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.386116 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.386137 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.386149 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:42Z","lastTransitionTime":"2025-12-03T19:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.477489 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.477641 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.477725 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:42 crc kubenswrapper[4916]: E1203 19:30:42.477938 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.477989 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:42 crc kubenswrapper[4916]: E1203 19:30:42.478149 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:42 crc kubenswrapper[4916]: E1203 19:30:42.478328 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:42 crc kubenswrapper[4916]: E1203 19:30:42.478413 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.489229 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.489307 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.489318 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.489354 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.489365 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:42Z","lastTransitionTime":"2025-12-03T19:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.592467 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.592532 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.592551 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.592605 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.592627 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:42Z","lastTransitionTime":"2025-12-03T19:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.696118 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.696196 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.696210 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.696227 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.696239 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:42Z","lastTransitionTime":"2025-12-03T19:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.798806 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.798838 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.798846 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.798858 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.798866 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:42Z","lastTransitionTime":"2025-12-03T19:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.901036 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.901081 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.901091 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.901111 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:42 crc kubenswrapper[4916]: I1203 19:30:42.901122 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:42Z","lastTransitionTime":"2025-12-03T19:30:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.004297 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.004374 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.004396 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.004425 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.004445 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:43Z","lastTransitionTime":"2025-12-03T19:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.107981 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.108038 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.108052 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.108073 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.108090 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:43Z","lastTransitionTime":"2025-12-03T19:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.210853 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.210898 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.210906 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.210920 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.210929 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:43Z","lastTransitionTime":"2025-12-03T19:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.313328 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.313366 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.313375 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.313389 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.313399 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:43Z","lastTransitionTime":"2025-12-03T19:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.416645 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.416698 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.416709 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.416725 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.416735 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:43Z","lastTransitionTime":"2025-12-03T19:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.519477 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.519561 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.519627 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.519655 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.519672 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:43Z","lastTransitionTime":"2025-12-03T19:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.622987 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.623062 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.623086 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.623117 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.623137 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:43Z","lastTransitionTime":"2025-12-03T19:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.726200 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.726243 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.726255 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.726273 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.726285 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:43Z","lastTransitionTime":"2025-12-03T19:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.829082 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.829190 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.829276 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.829316 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.829340 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:43Z","lastTransitionTime":"2025-12-03T19:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.932783 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.932851 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.932866 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.932898 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:43 crc kubenswrapper[4916]: I1203 19:30:43.932915 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:43Z","lastTransitionTime":"2025-12-03T19:30:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.036768 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.036878 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.036889 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.036908 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.036920 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:44Z","lastTransitionTime":"2025-12-03T19:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.139820 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.139867 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.139878 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.139893 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.139904 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:44Z","lastTransitionTime":"2025-12-03T19:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.242186 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.242258 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.242276 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.242299 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.242320 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:44Z","lastTransitionTime":"2025-12-03T19:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.345181 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.345236 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.345254 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.345278 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.345293 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:44Z","lastTransitionTime":"2025-12-03T19:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.447790 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.447838 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.447848 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.447863 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.447873 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:44Z","lastTransitionTime":"2025-12-03T19:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.478022 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.478097 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.478095 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:44 crc kubenswrapper[4916]: E1203 19:30:44.478228 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:44 crc kubenswrapper[4916]: E1203 19:30:44.478348 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.478465 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:44 crc kubenswrapper[4916]: E1203 19:30:44.478538 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:44 crc kubenswrapper[4916]: E1203 19:30:44.478630 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.495983 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.520593 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c687744
1ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.538237 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.551707 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.551840 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.551899 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.551924 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.551941 4916 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:44Z","lastTransitionTime":"2025-12-03T19:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.556176 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.568688 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.586478 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.605785 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.622141 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.637133 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.649266 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.658106 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.658149 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.658163 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.658181 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.658196 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:44Z","lastTransitionTime":"2025-12-03T19:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.662624 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.675512 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container 
could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.690141 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.703550 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.721132 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: 
I1203 19:30:44.734646 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.756218 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:35Z\\\",\\\"message\\\":\\\"6554 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298738 6554 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-fcbx4 in node crc\\\\nI1203 19:30:35.298742 6554 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-fcbx4 after 0 failed attempt(s)\\\\nI1203 19:30:35.298747 6554 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298757 6554 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nF1203 19:30:35.298759 6554 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet v\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.761404 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.761454 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.761462 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.761477 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.761489 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:44Z","lastTransitionTime":"2025-12-03T19:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.769674 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:44Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.864645 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.864696 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.864708 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.864724 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.864737 4916 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:44Z","lastTransitionTime":"2025-12-03T19:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.967686 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.967749 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.967768 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.967792 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:44 crc kubenswrapper[4916]: I1203 19:30:44.967809 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:44Z","lastTransitionTime":"2025-12-03T19:30:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.071012 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.071095 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.071108 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.071127 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.071139 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:45Z","lastTransitionTime":"2025-12-03T19:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.173782 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.173829 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.173840 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.173855 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.173870 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:45Z","lastTransitionTime":"2025-12-03T19:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.277235 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.277295 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.277318 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.277348 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.277370 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:45Z","lastTransitionTime":"2025-12-03T19:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.380660 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.380713 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.380727 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.380745 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.380758 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:45Z","lastTransitionTime":"2025-12-03T19:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.484054 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.484449 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.484466 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.484491 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.484508 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:45Z","lastTransitionTime":"2025-12-03T19:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.587191 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.587308 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.587332 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.587355 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.587372 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:45Z","lastTransitionTime":"2025-12-03T19:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.690393 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.690449 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.690464 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.690480 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.690490 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:45Z","lastTransitionTime":"2025-12-03T19:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.793008 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.793046 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.793055 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.793069 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.793078 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:45Z","lastTransitionTime":"2025-12-03T19:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.895557 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.895673 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.895819 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.895848 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.895870 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:45Z","lastTransitionTime":"2025-12-03T19:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.998667 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.998721 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.998732 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.998748 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:45 crc kubenswrapper[4916]: I1203 19:30:45.998760 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:45Z","lastTransitionTime":"2025-12-03T19:30:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.101860 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.101925 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.101948 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.101980 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.102003 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:46Z","lastTransitionTime":"2025-12-03T19:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.204983 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.205030 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.205041 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.205059 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.205070 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:46Z","lastTransitionTime":"2025-12-03T19:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.307454 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.307501 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.307517 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.307539 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.307553 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:46Z","lastTransitionTime":"2025-12-03T19:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.410151 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.410192 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.410205 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.410219 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.410229 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:46Z","lastTransitionTime":"2025-12-03T19:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.477600 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.477676 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:46 crc kubenswrapper[4916]: E1203 19:30:46.477774 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.477816 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:46 crc kubenswrapper[4916]: E1203 19:30:46.477911 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:46 crc kubenswrapper[4916]: E1203 19:30:46.478124 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.477620 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:46 crc kubenswrapper[4916]: E1203 19:30:46.478490 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.512392 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.512534 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.512558 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.512606 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.512624 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:46Z","lastTransitionTime":"2025-12-03T19:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.615214 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.615283 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.615304 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.615332 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.615354 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:46Z","lastTransitionTime":"2025-12-03T19:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.718644 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.718684 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.718695 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.718710 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.718721 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:46Z","lastTransitionTime":"2025-12-03T19:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.821207 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.822190 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.822534 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.822721 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.822872 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:46Z","lastTransitionTime":"2025-12-03T19:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.925731 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.925858 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.925875 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.925892 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:46 crc kubenswrapper[4916]: I1203 19:30:46.925903 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:46Z","lastTransitionTime":"2025-12-03T19:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.027901 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.028129 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.028200 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.028259 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.028326 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:47Z","lastTransitionTime":"2025-12-03T19:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.130656 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.130882 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.130977 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.131054 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.131113 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:47Z","lastTransitionTime":"2025-12-03T19:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.234081 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.234136 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.234149 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.234166 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.234178 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:47Z","lastTransitionTime":"2025-12-03T19:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.337334 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.337403 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.337415 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.337439 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.337454 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:47Z","lastTransitionTime":"2025-12-03T19:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.441008 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.441071 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.441089 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.441113 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.441130 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:47Z","lastTransitionTime":"2025-12-03T19:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.543142 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.543179 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.543188 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.543201 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.543210 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:47Z","lastTransitionTime":"2025-12-03T19:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.646015 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.646079 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.646094 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.646114 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.646131 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:47Z","lastTransitionTime":"2025-12-03T19:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.749830 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.749882 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.749897 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.749917 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.749931 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:47Z","lastTransitionTime":"2025-12-03T19:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.852889 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.852943 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.852956 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.852973 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.852984 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:47Z","lastTransitionTime":"2025-12-03T19:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.955487 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.955530 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.955539 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.955554 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:47 crc kubenswrapper[4916]: I1203 19:30:47.955584 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:47Z","lastTransitionTime":"2025-12-03T19:30:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.058370 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.058672 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.058802 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.058900 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.058997 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:48Z","lastTransitionTime":"2025-12-03T19:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.162197 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.162477 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.162539 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.162634 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.162710 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:48Z","lastTransitionTime":"2025-12-03T19:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.266023 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.266084 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.266099 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.266125 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.266142 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:48Z","lastTransitionTime":"2025-12-03T19:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.369011 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.369048 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.369057 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.369071 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.369080 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:48Z","lastTransitionTime":"2025-12-03T19:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.471919 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.471979 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.471996 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.472020 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.472038 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:48Z","lastTransitionTime":"2025-12-03T19:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.477388 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.477430 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.477468 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:48 crc kubenswrapper[4916]: E1203 19:30:48.477512 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.477532 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:48 crc kubenswrapper[4916]: E1203 19:30:48.477683 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:48 crc kubenswrapper[4916]: E1203 19:30:48.477768 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:48 crc kubenswrapper[4916]: E1203 19:30:48.477834 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.574705 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.574788 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.574806 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.575213 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.575272 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:48Z","lastTransitionTime":"2025-12-03T19:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.678420 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.678471 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.678488 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.678510 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.678532 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:48Z","lastTransitionTime":"2025-12-03T19:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.780984 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.781023 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.781033 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.781046 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.781054 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:48Z","lastTransitionTime":"2025-12-03T19:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.883430 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.883483 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.883499 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.883520 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.883536 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:48Z","lastTransitionTime":"2025-12-03T19:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.985827 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.985891 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.985907 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.985936 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:48 crc kubenswrapper[4916]: I1203 19:30:48.985960 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:48Z","lastTransitionTime":"2025-12-03T19:30:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.087945 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.088005 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.088024 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.088048 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.088064 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:49Z","lastTransitionTime":"2025-12-03T19:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.190469 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.190580 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.190597 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.190626 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.190639 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:49Z","lastTransitionTime":"2025-12-03T19:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.293476 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.293516 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.293530 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.293548 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.293589 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:49Z","lastTransitionTime":"2025-12-03T19:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.395806 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.395849 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.395862 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.395879 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.395891 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:49Z","lastTransitionTime":"2025-12-03T19:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.498177 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.498241 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.498259 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.498287 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.498310 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:49Z","lastTransitionTime":"2025-12-03T19:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.601071 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.601135 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.601153 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.601180 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.601200 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:49Z","lastTransitionTime":"2025-12-03T19:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.703818 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.703861 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.703869 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.703885 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.703895 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:49Z","lastTransitionTime":"2025-12-03T19:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.806609 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.806638 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.806646 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.806659 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.806669 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:49Z","lastTransitionTime":"2025-12-03T19:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.908153 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.908189 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.908198 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.908211 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:49 crc kubenswrapper[4916]: I1203 19:30:49.908219 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:49Z","lastTransitionTime":"2025-12-03T19:30:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.010811 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.010854 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.010864 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.010881 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.010893 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:50Z","lastTransitionTime":"2025-12-03T19:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.113490 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.113520 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.113528 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.113541 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.113550 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:50Z","lastTransitionTime":"2025-12-03T19:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.216067 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.216118 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.216133 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.216151 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.216163 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:50Z","lastTransitionTime":"2025-12-03T19:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.318193 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.318259 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.318272 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.318288 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.318341 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:50Z","lastTransitionTime":"2025-12-03T19:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.421602 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.421648 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.421659 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.421677 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.421690 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:50Z","lastTransitionTime":"2025-12-03T19:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.477373 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.477424 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.477453 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.477522 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:50 crc kubenswrapper[4916]: E1203 19:30:50.477672 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:50 crc kubenswrapper[4916]: E1203 19:30:50.477905 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:50 crc kubenswrapper[4916]: E1203 19:30:50.478010 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:50 crc kubenswrapper[4916]: E1203 19:30:50.478162 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.523545 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.523623 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.523643 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.523667 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.523685 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:50Z","lastTransitionTime":"2025-12-03T19:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.626447 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.626506 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.626524 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.626548 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.626603 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:50Z","lastTransitionTime":"2025-12-03T19:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.728728 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.728805 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.728825 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.728850 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.728868 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:50Z","lastTransitionTime":"2025-12-03T19:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.831531 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.831590 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.831601 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.831616 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.831629 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:50Z","lastTransitionTime":"2025-12-03T19:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.878100 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:50 crc kubenswrapper[4916]: E1203 19:30:50.878279 4916 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:50 crc kubenswrapper[4916]: E1203 19:30:50.878355 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs podName:9ae5584e-d1d9-4aa9-955a-41bdf15f0461 nodeName:}" failed. No retries permitted until 2025-12-03 19:31:22.878332502 +0000 UTC m=+98.841142798 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs") pod "network-metrics-daemon-kbxgw" (UID: "9ae5584e-d1d9-4aa9-955a-41bdf15f0461") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.933233 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.933259 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.933268 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.933280 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:50 crc kubenswrapper[4916]: I1203 19:30:50.933289 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:50Z","lastTransitionTime":"2025-12-03T19:30:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.035788 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.035861 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.035880 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.035902 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.035918 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.054552 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.054687 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.054710 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.054731 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.054746 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: E1203 19:30:51.072316 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:51Z is after 2025-08-24T17:21:41Z"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.076759 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.076813 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.076830 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.076852 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.076868 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:51 crc kubenswrapper[4916]: E1203 19:30:51.093848 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:51Z is after 2025-08-24T17:21:41Z"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.097983 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.098073 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.098091 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.098167 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.098193 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:51 crc kubenswrapper[4916]: E1203 19:30:51.120225 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:51Z is after 2025-08-24T17:21:41Z"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.123893 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.123917 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.123933 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.123957 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.123970 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:51 crc kubenswrapper[4916]: E1203 19:30:51.135164 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:51Z is after 2025-08-24T17:21:41Z"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.138841 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.138868 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.138878 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.138890 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.138900 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:51 crc kubenswrapper[4916]: E1203 19:30:51.151068 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:51Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:51 crc kubenswrapper[4916]: E1203 19:30:51.151278 4916 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.153180 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.153222 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.153231 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.153246 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.153257 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.255599 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.255658 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.255679 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.255702 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.255720 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.358353 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.358405 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.358416 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.358434 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.358447 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.462514 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.462584 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.462594 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.462614 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.462626 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.566193 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.566272 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.566286 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.566307 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.566319 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.669166 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.669246 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.669262 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.669281 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.669292 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.772020 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.772173 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.772196 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.772267 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.772296 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.874543 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.874607 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.874618 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.874636 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.874651 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.915024 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4vkgz_d75c407a-2bbd-4cc3-bc0e-b1010aeeab57/kube-multus/0.log" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.915091 4916 generic.go:334] "Generic (PLEG): container finished" podID="d75c407a-2bbd-4cc3-bc0e-b1010aeeab57" containerID="18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969" exitCode=1 Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.915190 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4vkgz" event={"ID":"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57","Type":"ContainerDied","Data":"18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.915631 4916 scope.go:117] "RemoveContainer" containerID="18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.935859 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:51Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.950769 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:51Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.976890 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.976932 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.976942 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.976957 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.976968 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:51Z","lastTransitionTime":"2025-12-03T19:30:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.983377 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:35Z\\\",\\\"message\\\":\\\"6554 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298738 6554 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-fcbx4 in node crc\\\\nI1203 19:30:35.298742 6554 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-fcbx4 after 0 failed attempt(s)\\\\nI1203 19:30:35.298747 6554 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298757 6554 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nF1203 19:30:35.298759 6554 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet v\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:51Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:51 crc kubenswrapper[4916]: I1203 19:30:51.998042 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:51Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.030509 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\
":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40
a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.045243 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.061790 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.075686 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.079933 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.079960 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.079971 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.079990 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.080003 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:52Z","lastTransitionTime":"2025-12-03T19:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.095183 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"2025-12-03T19:30:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600\\\\n2025-12-03T19:30:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600 to /host/opt/cni/bin/\\\\n2025-12-03T19:30:06Z [verbose] multus-daemon started\\\\n2025-12-03T19:30:06Z [verbose] Readiness Indicator file check\\\\n2025-12-03T19:30:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.108639 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 
19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.120221 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.131105 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.146402 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\
\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed
8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.160172 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.182786 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.182836 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.182849 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.182868 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.182880 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:52Z","lastTransitionTime":"2025-12-03T19:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.185552 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMoun
ts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.199786 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.211882 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.222401 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.286100 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.286507 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.286938 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.287345 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.287724 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:52Z","lastTransitionTime":"2025-12-03T19:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.390904 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.391204 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.391317 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.391441 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.391552 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:52Z","lastTransitionTime":"2025-12-03T19:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.478034 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:52 crc kubenswrapper[4916]: E1203 19:30:52.478366 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.478114 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.478063 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.478985 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.479163 4916 scope.go:117] "RemoveContainer" containerID="a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46" Dec 03 19:30:52 crc kubenswrapper[4916]: E1203 19:30:52.479236 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:52 crc kubenswrapper[4916]: E1203 19:30:52.479302 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:52 crc kubenswrapper[4916]: E1203 19:30:52.480699 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" Dec 03 19:30:52 crc kubenswrapper[4916]: E1203 19:30:52.481884 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.493426 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.493470 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.493483 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.493499 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.493511 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:52Z","lastTransitionTime":"2025-12-03T19:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.596072 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.596347 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.596443 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.596545 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.596674 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:52Z","lastTransitionTime":"2025-12-03T19:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.698950 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.699021 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.699042 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.699069 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.699091 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:52Z","lastTransitionTime":"2025-12-03T19:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.801969 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.802184 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.802255 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.802328 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.802444 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:52Z","lastTransitionTime":"2025-12-03T19:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.905652 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.905690 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.905700 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.905714 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.905726 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:52Z","lastTransitionTime":"2025-12-03T19:30:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.919779 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4vkgz_d75c407a-2bbd-4cc3-bc0e-b1010aeeab57/kube-multus/0.log" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.919834 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4vkgz" event={"ID":"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57","Type":"ContainerStarted","Data":"37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5"} Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.936120 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.947387 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.962080 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.974408 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:52 crc kubenswrapper[4916]: I1203 19:30:52.988508 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.000526 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:52Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.008418 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.008453 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.008469 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.008514 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.008525 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:53Z","lastTransitionTime":"2025-12-03T19:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.012509 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.024623 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.036444 4916 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.045260 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.062972 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:35Z\\\",\\\"message\\\":\\\"6554 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298738 6554 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-fcbx4 in node crc\\\\nI1203 19:30:35.298742 6554 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-fcbx4 after 0 failed attempt(s)\\\\nI1203 19:30:35.298747 6554 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298757 6554 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nF1203 19:30:35.298759 6554 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet v\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.072490 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.082832 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.101974 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resource
s\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Complete
d\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.110551 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.110625 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.110637 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.110655 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.110668 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:53Z","lastTransitionTime":"2025-12-03T19:30:53Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.115280 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29
:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.127501 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.139638 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.152112 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"2025-12-03T19:30:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600\\\\n2025-12-03T19:30:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600 to /host/opt/cni/bin/\\\\n2025-12-03T19:30:06Z [verbose] multus-daemon started\\\\n2025-12-03T19:30:06Z [verbose] Readiness Indicator file check\\\\n2025-12-03T19:30:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:53Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.213736 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.213789 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.213800 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.213817 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.213827 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:53Z","lastTransitionTime":"2025-12-03T19:30:53Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.315991 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.316037 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.316049 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.316064 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.316076 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:53Z","lastTransitionTime":"2025-12-03T19:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.418432 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.418499 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.418514 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.418537 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.418637 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:53Z","lastTransitionTime":"2025-12-03T19:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.521817 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.521935 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.521956 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.521981 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.522038 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:53Z","lastTransitionTime":"2025-12-03T19:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.624803 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.624875 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.624894 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.624910 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.624920 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:53Z","lastTransitionTime":"2025-12-03T19:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.726843 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.726881 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.726894 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.726909 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.726921 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:53Z","lastTransitionTime":"2025-12-03T19:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.829299 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.829343 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.829359 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.829374 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.829384 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:53Z","lastTransitionTime":"2025-12-03T19:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.931514 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.931551 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.931564 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.931579 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:53 crc kubenswrapper[4916]: I1203 19:30:53.931607 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:53Z","lastTransitionTime":"2025-12-03T19:30:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.034412 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.034474 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.034498 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.034528 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.034548 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:54Z","lastTransitionTime":"2025-12-03T19:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.137405 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.137472 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.137484 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.137524 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.137538 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:54Z","lastTransitionTime":"2025-12-03T19:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.240438 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.240509 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.240531 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.240558 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.240622 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:54Z","lastTransitionTime":"2025-12-03T19:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.343492 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.343664 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.343685 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.343708 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.343724 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:54Z","lastTransitionTime":"2025-12-03T19:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.446601 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.446647 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.446659 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.446677 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.446689 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:54Z","lastTransitionTime":"2025-12-03T19:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.477666 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.477689 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.477689 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:30:54 crc kubenswrapper[4916]: E1203 19:30:54.477776 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.477890 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:30:54 crc kubenswrapper[4916]: E1203 19:30:54.478021 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:30:54 crc kubenswrapper[4916]: E1203 19:30:54.478137 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:30:54 crc kubenswrapper[4916]: E1203 19:30:54.478225 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.498235 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.513534 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.538052 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:35Z\\\",\\\"message\\\":\\\"6554 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298738 6554 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-fcbx4 in node crc\\\\nI1203 19:30:35.298742 6554 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-fcbx4 after 0 failed attempt(s)\\\\nI1203 19:30:35.298747 6554 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298757 6554 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nF1203 19:30:35.298759 6554 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet v\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.549510 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.549556 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.549569 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.549608 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.549622 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:54Z","lastTransitionTime":"2025-12-03T19:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.554376 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.571361 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171
e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.593146 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347
c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.605754 4916 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.625207 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.636416 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"2025-12-03T19:30:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600\\\\n2025-12-03T19:30:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600 to /host/opt/cni/bin/\\\\n2025-12-03T19:30:06Z [verbose] multus-daemon started\\\\n2025-12-03T19:30:06Z [verbose] Readiness Indicator file check\\\\n2025-12-03T19:30:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.645249 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status 
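[editor's note] The kube-multus restart recorded above is explained by its own container log: the daemon blocks until a "readiness indicator file" (the default network's CNI config, 10-ovn-kubernetes.conf) appears, then exits with "timed out waiting for the condition", which is the standard timeout error text of the PollImmediate helper in k8s.io/apimachinery/pkg/util/wait. A stdlib-only sketch of such a wait loop follows; the interval and timeout values are assumptions, not taken from this log.

    package main

    import (
        "errors"
        "fmt"
        "os"
        "time"
    )

    // waitForFile polls until path exists or timeout elapses, the shape of
    // multus's readiness-indicator wait (not its actual code).
    func waitForFile(path string, interval, timeout time.Duration) error {
        deadline := time.Now().Add(timeout)
        for {
            if _, err := os.Stat(path); err == nil {
                return nil // default network is ready
            }
            if time.Now().After(deadline) {
                return errors.New("timed out waiting for the condition")
            }
            time.Sleep(interval)
        }
    }

    func main() {
        // Path taken from the log; 1s/45s are assumed values.
        err := waitForFile("/host/run/multus/cni/net.d/10-ovn-kubernetes.conf",
            time.Second, 45*time.Second)
        fmt.Println(err)
    }
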
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 
19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.651221 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.651254 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.651264 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.651276 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.651284 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:54Z","lastTransitionTime":"2025-12-03T19:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.655125 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.663465 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.674569 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.683682 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
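[editor's note] The quoted payloads in these records are strategic-merge patches against Pod.status. The "$setElementOrder/conditions" key is a patch directive: it pins the order of the conditions list, whose elements are merged by their "type" field rather than by index. The stdlib-only snippet below just decodes an abridged patch of this shape to show the structure; real merge semantics live in k8s.io/apimachinery's strategicpatch package.

    package main

    import (
        "encoding/json"
        "fmt"
    )

    func main() {
        // Abridged from the status patches logged above.
        patch := []byte(`{
          "status": {
            "$setElementOrder/conditions": [
              {"type": "PodReadyToStartContainers"},
              {"type": "Initialized"},
              {"type": "Ready"},
              {"type": "ContainersReady"},
              {"type": "PodScheduled"}
            ],
            "conditions": [
              {"type": "Ready", "status": "False", "reason": "ContainersNotReady"}
            ]
          }
        }`)
        var m map[string]any
        if err := json.Unmarshal(patch, &m); err != nil {
            panic(err)
        }
        status := m["status"].(map[string]any)
        order := status["$setElementOrder/conditions"].([]any)
        for i, c := range order {
            fmt.Printf("position %d: %v\n", i, c.(map[string]any)["type"])
        }
    }
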
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.696367 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.706326 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.718879 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.730697 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
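[editor's note] Several pods in this log (network-check-target-container, check-endpoints, networking-console-plugin) carry lastState.terminated entries with exitCode 137 and reason ContainerStatusUnknown: the containers were killed by SIGKILL (by convention, exit codes above 128 encode 128 + signal number) and their runtime records were lost across the restart, so startedAt/finishedAt are null and the kubelet substitutes the "could not be located" message. The arithmetic, for the record:

    package main

    import (
        "fmt"
        "syscall"
    )

    func main() {
        exitCode := 137 // from the lastState.terminated entries above
        if exitCode > 128 {
            sig := syscall.Signal(exitCode - 128)
            // Prints: exit code 137 = 128 + signal 9 (killed)
            fmt.Printf("exit code %d = 128 + signal %d (%v)\n",
                exitCode, exitCode-128, sig)
        }
    }
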
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:30:54Z is after 2025-08-24T17:21:41Z" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.753394 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.753424 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.753433 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.753449 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.753458 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:54Z","lastTransitionTime":"2025-12-03T19:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.856195 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.856241 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.856257 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.856278 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.856295 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:54Z","lastTransitionTime":"2025-12-03T19:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.958306 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.958353 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.958363 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.958377 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:54 crc kubenswrapper[4916]: I1203 19:30:54.958386 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:54Z","lastTransitionTime":"2025-12-03T19:30:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.061009 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.061069 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.061088 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.061112 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.061129 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:55Z","lastTransitionTime":"2025-12-03T19:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.163953 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.164009 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.164026 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.164050 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.164067 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:55Z","lastTransitionTime":"2025-12-03T19:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.266550 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.266605 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.266615 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.266628 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.266637 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:55Z","lastTransitionTime":"2025-12-03T19:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.369520 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.369627 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.369658 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.369689 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.369714 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:55Z","lastTransitionTime":"2025-12-03T19:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.472327 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.472363 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.472372 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.472385 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.472396 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:55Z","lastTransitionTime":"2025-12-03T19:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.574799 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.574854 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.574867 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.574882 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.574894 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:55Z","lastTransitionTime":"2025-12-03T19:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.677375 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.677427 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.677439 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.677457 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.677469 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:55Z","lastTransitionTime":"2025-12-03T19:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.779603 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.779652 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.779663 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.779680 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.779695 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:55Z","lastTransitionTime":"2025-12-03T19:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.881862 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.881916 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.881929 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.881947 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.881959 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:55Z","lastTransitionTime":"2025-12-03T19:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.984381 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.984443 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.984461 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.984487 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:55 crc kubenswrapper[4916]: I1203 19:30:55.984505 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:55Z","lastTransitionTime":"2025-12-03T19:30:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.086974 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.087027 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.087044 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.087067 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.087082 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:56Z","lastTransitionTime":"2025-12-03T19:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.189662 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.189694 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.189704 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.189721 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.189733 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:56Z","lastTransitionTime":"2025-12-03T19:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.292455 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.292494 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.292503 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.292517 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.292526 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:56Z","lastTransitionTime":"2025-12-03T19:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.395192 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.395256 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.395274 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.395299 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.395317 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:56Z","lastTransitionTime":"2025-12-03T19:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.477061 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 19:30:56 crc kubenswrapper[4916]: E1203 19:30:56.477245 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.477464 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 19:30:56 crc kubenswrapper[4916]: E1203 19:30:56.477531 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.477710 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 19:30:56 crc kubenswrapper[4916]: E1203 19:30:56.477775 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.477946 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw"
Dec 03 19:30:56 crc kubenswrapper[4916]: E1203 19:30:56.478175 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.497599 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.497648 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.497657 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.497669 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.497680 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:56Z","lastTransitionTime":"2025-12-03T19:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.600110 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.600168 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.600182 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.600199 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.600212 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:56Z","lastTransitionTime":"2025-12-03T19:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.703435 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.703478 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.703487 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.703501 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.703512 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:56Z","lastTransitionTime":"2025-12-03T19:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.805825 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.805865 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.805875 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.805889 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.805898 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:56Z","lastTransitionTime":"2025-12-03T19:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.907967 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.908004 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.908016 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.908030 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:56 crc kubenswrapper[4916]: I1203 19:30:56.908040 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:56Z","lastTransitionTime":"2025-12-03T19:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.011071 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.011108 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.011121 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.011134 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.011143 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:57Z","lastTransitionTime":"2025-12-03T19:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.113422 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.113460 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.113469 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.113480 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.113490 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:57Z","lastTransitionTime":"2025-12-03T19:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.215645 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.215717 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.215734 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.215759 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.215777 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:57Z","lastTransitionTime":"2025-12-03T19:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.318823 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.318874 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.318884 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.318901 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.318914 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:57Z","lastTransitionTime":"2025-12-03T19:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.421479 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.421517 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.421531 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.421547 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.421558 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:57Z","lastTransitionTime":"2025-12-03T19:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.523500 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.523540 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.523548 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.523565 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.523590 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:57Z","lastTransitionTime":"2025-12-03T19:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.625640 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.625730 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.625742 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.625761 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.625772 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:57Z","lastTransitionTime":"2025-12-03T19:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.728894 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.728921 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.728929 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.728941 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.728949 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:57Z","lastTransitionTime":"2025-12-03T19:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.832741 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.832805 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.832822 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.832845 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.832864 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:57Z","lastTransitionTime":"2025-12-03T19:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.935239 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.935287 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.935300 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.935319 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:57 crc kubenswrapper[4916]: I1203 19:30:57.935334 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:57Z","lastTransitionTime":"2025-12-03T19:30:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.038324 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.038361 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.038371 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.038386 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.038396 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:58Z","lastTransitionTime":"2025-12-03T19:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.141458 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.141500 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.141516 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.141537 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.141554 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:58Z","lastTransitionTime":"2025-12-03T19:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.244742 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.244793 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.244809 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.244832 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.244848 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:58Z","lastTransitionTime":"2025-12-03T19:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.347770 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.347833 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.347850 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.347873 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.347890 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:58Z","lastTransitionTime":"2025-12-03T19:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.450629 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.450694 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.450733 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.450764 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.450786 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:58Z","lastTransitionTime":"2025-12-03T19:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.477961 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.478093 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 19:30:58 crc kubenswrapper[4916]: E1203 19:30:58.478285 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.478343 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.478351 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw"
Dec 03 19:30:58 crc kubenswrapper[4916]: E1203 19:30:58.478512 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 19:30:58 crc kubenswrapper[4916]: E1203 19:30:58.478719 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461"
Dec 03 19:30:58 crc kubenswrapper[4916]: E1203 19:30:58.478831 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.553687 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.553758 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.553777 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.553807 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.553825 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:58Z","lastTransitionTime":"2025-12-03T19:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.657028 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.657115 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.657142 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.657174 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.657198 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:58Z","lastTransitionTime":"2025-12-03T19:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.760822 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.760944 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.760971 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.761008 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.761033 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:58Z","lastTransitionTime":"2025-12-03T19:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.863938 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.864244 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.864412 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.864572 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.864760 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:58Z","lastTransitionTime":"2025-12-03T19:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.967233 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.967709 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.967891 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.968110 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:58 crc kubenswrapper[4916]: I1203 19:30:58.968328 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:58Z","lastTransitionTime":"2025-12-03T19:30:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.071660 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.071713 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.071726 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.071744 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.071759 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:59Z","lastTransitionTime":"2025-12-03T19:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.174034 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.174087 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.174102 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.174123 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.174137 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:59Z","lastTransitionTime":"2025-12-03T19:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.277398 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.277509 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.277531 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.277554 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.277601 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:59Z","lastTransitionTime":"2025-12-03T19:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.381681 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.381736 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.381752 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.381775 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.381795 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:59Z","lastTransitionTime":"2025-12-03T19:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.483926 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.483989 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.484006 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.484030 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.484051 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:59Z","lastTransitionTime":"2025-12-03T19:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.586406 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.586464 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.586486 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.586514 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.586537 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:59Z","lastTransitionTime":"2025-12-03T19:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.689536 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.689633 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.689653 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.689684 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.689708 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:59Z","lastTransitionTime":"2025-12-03T19:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.792159 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.792434 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.792507 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.792604 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.792677 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:59Z","lastTransitionTime":"2025-12-03T19:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.895758 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.895819 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.895840 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.895865 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.895883 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:59Z","lastTransitionTime":"2025-12-03T19:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.997901 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.997950 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.997961 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.997979 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:30:59 crc kubenswrapper[4916]: I1203 19:30:59.997994 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:30:59Z","lastTransitionTime":"2025-12-03T19:30:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.100961 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.101016 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.101026 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.101041 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.101050 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:00Z","lastTransitionTime":"2025-12-03T19:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.204013 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.204554 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.204648 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.204730 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.204810 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:00Z","lastTransitionTime":"2025-12-03T19:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.308231 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.308287 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.308304 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.308328 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.308345 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:00Z","lastTransitionTime":"2025-12-03T19:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.411009 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.411052 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.411061 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.411077 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.411334 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:00Z","lastTransitionTime":"2025-12-03T19:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.477201 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.477189 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.478018 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 19:31:00 crc kubenswrapper[4916]: E1203 19:31:00.478367 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.478824 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw"
Dec 03 19:31:00 crc kubenswrapper[4916]: E1203 19:31:00.479001 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 19:31:00 crc kubenswrapper[4916]: E1203 19:31:00.479361 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 19:31:00 crc kubenswrapper[4916]: E1203 19:31:00.479800 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.514251 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.514306 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.514319 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.514341 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.514356 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:00Z","lastTransitionTime":"2025-12-03T19:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.617500 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.617631 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.617652 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.617681 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.617700 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:00Z","lastTransitionTime":"2025-12-03T19:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.721236 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.721302 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.721315 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.721343 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.721358 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:00Z","lastTransitionTime":"2025-12-03T19:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.824625 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.824688 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.824712 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.824745 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.824769 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:00Z","lastTransitionTime":"2025-12-03T19:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.927434 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.927512 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.927572 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.927703 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:00 crc kubenswrapper[4916]: I1203 19:31:00.927731 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:00Z","lastTransitionTime":"2025-12-03T19:31:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.029709 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.029756 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.029769 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.029789 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.029802 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.132734 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.132772 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.132782 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.132799 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.132811 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.236011 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.236045 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.236055 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.236072 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.236081 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.338828 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.338858 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.338865 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.338878 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.338886 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.370109 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.370182 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.370204 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.370234 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.370255 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: E1203 19:31:01.392841 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:01Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.398012 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.398044 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.398052 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.398064 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.398074 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: E1203 19:31:01.409844 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:01Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.413837 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.413882 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.413893 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.413918 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.413968 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: E1203 19:31:01.426137 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:01Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.431039 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.431096 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.431106 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.431120 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.431129 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: E1203 19:31:01.443765 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:01Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.448166 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.448225 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.448249 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.448280 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.448303 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: E1203 19:31:01.465670 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:01Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:01 crc kubenswrapper[4916]: E1203 19:31:01.465774 4916 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.467404 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.467497 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.467520 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.467604 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.467632 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.570953 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.571013 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.571030 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.571057 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.571080 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.674970 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.675055 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.675079 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.675109 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.675132 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.778501 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.778546 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.778593 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.778617 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.778635 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.881154 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.881220 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.881245 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.881269 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.881288 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.983661 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.983722 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.983740 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.983764 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:01 crc kubenswrapper[4916]: I1203 19:31:01.983782 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:01Z","lastTransitionTime":"2025-12-03T19:31:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.086657 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.086703 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.086711 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.086725 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.086734 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:02Z","lastTransitionTime":"2025-12-03T19:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.189701 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.189767 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.189789 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.189812 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.189831 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:02Z","lastTransitionTime":"2025-12-03T19:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.292860 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.292942 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.292960 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.293008 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.293041 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:02Z","lastTransitionTime":"2025-12-03T19:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.402230 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.402275 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.402313 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.402330 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.402342 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:02Z","lastTransitionTime":"2025-12-03T19:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.477262 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.477352 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.477300 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:02 crc kubenswrapper[4916]: E1203 19:31:02.477513 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:02 crc kubenswrapper[4916]: E1203 19:31:02.477651 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:02 crc kubenswrapper[4916]: E1203 19:31:02.477834 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.477871 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:02 crc kubenswrapper[4916]: E1203 19:31:02.477967 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.505657 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.505731 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.505767 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.505788 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.505800 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:02Z","lastTransitionTime":"2025-12-03T19:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.609072 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.609196 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.609214 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.609254 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.609274 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:02Z","lastTransitionTime":"2025-12-03T19:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.712802 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.712879 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.712901 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.712938 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.712981 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:02Z","lastTransitionTime":"2025-12-03T19:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.816397 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.816436 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.816445 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.816459 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.816470 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:02Z","lastTransitionTime":"2025-12-03T19:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.919400 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.919457 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.919472 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.919494 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:02 crc kubenswrapper[4916]: I1203 19:31:02.919507 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:02Z","lastTransitionTime":"2025-12-03T19:31:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.024522 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.024594 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.024606 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.024623 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.024636 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:03Z","lastTransitionTime":"2025-12-03T19:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.126262 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.126294 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.126302 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.126314 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.126322 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:03Z","lastTransitionTime":"2025-12-03T19:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.228880 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.228920 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.228934 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.228952 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.228963 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:03Z","lastTransitionTime":"2025-12-03T19:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.332299 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.332367 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.332380 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.332399 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.332411 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:03Z","lastTransitionTime":"2025-12-03T19:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.434940 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.434996 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.435008 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.435041 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.435049 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:03Z","lastTransitionTime":"2025-12-03T19:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.491361 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.537659 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.537700 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.537716 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.537736 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.537752 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:03Z","lastTransitionTime":"2025-12-03T19:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.640437 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.640496 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.640512 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.640536 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.640552 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:03Z","lastTransitionTime":"2025-12-03T19:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.743843 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.743919 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.743960 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.743992 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.744016 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:03Z","lastTransitionTime":"2025-12-03T19:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.845990 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.846021 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.846030 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.846043 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.846055 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:03Z","lastTransitionTime":"2025-12-03T19:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.949092 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.949127 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.949139 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.949153 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:03 crc kubenswrapper[4916]: I1203 19:31:03.949162 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:03Z","lastTransitionTime":"2025-12-03T19:31:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.052673 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.052748 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.052770 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.052800 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.052822 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:04Z","lastTransitionTime":"2025-12-03T19:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.155426 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.155466 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.155478 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.155522 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.155534 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:04Z","lastTransitionTime":"2025-12-03T19:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.258191 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.258318 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.258344 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.258374 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.258399 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:04Z","lastTransitionTime":"2025-12-03T19:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.361748 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.361797 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.361809 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.361827 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.361839 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:04Z","lastTransitionTime":"2025-12-03T19:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.464369 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.464422 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.464438 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.464460 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.464478 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:04Z","lastTransitionTime":"2025-12-03T19:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.477124 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.477184 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:04 crc kubenswrapper[4916]: E1203 19:31:04.477308 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.477373 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.477411 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:04 crc kubenswrapper[4916]: E1203 19:31:04.477673 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:04 crc kubenswrapper[4916]: E1203 19:31:04.477857 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:04 crc kubenswrapper[4916]: E1203 19:31:04.477915 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.511924 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7d
b49ca08057bc118953938a46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:35Z\\\",\\\"message\\\":\\\"6554 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298738 6554 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-fcbx4 in node crc\\\\nI1203 19:30:35.298742 6554 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-fcbx4 after 0 failed attempt(s)\\\\nI1203 19:30:35.298747 6554 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298757 6554 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nF1203 19:30:35.298759 6554 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet v\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.534792 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.551099 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d3f11ad-2052-4a50-a66f-090baff4eec5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e751bd44282d50876324e02cf4f3eea3dbc28a7c3ba136fe3259fae0ce0098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Runni
ng\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.567644 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.567722 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.567739 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.567765 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.567783 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:04Z","lastTransitionTime":"2025-12-03T19:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.570830 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.586744 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.603195 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.624723 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"2025-12-03T19:30:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600\\\\n2025-12-03T19:30:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600 to 
/host/opt/cni/bin/\\\\n2025-12-03T19:30:06Z [verbose] multus-daemon started\\\\n2025-12-03T19:30:06Z [verbose] Readiness Indicator file check\\\\n2025-12-03T19:30:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.637610 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 
19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.662070 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.670473 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.670528 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.670536 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.670556 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.670588 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:04Z","lastTransitionTime":"2025-12-03T19:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.680300 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.704408 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.721032 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.745805 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.763877 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.772899 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.772934 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.772944 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.772956 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.772966 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:04Z","lastTransitionTime":"2025-12-03T19:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.781434 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":
\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a
6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.799420 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.816860 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-sc
heduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.835793 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.856082 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
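Every status-patch failure above is rejected for the same root cause: the API server cannot verify the network-node-identity webhook's serving certificate because the current time (2025-12-03T19:31:04Z) is past the certificate's NotAfter date (2025-08-24T17:21:41Z). Below is a minimal Go sketch of the validity-window test that produces the "x509: certificate has expired or is not yet valid" message; the /etc/webhook-cert/ mount path comes from the webhook container's volumeMounts above, but the tls.crt filename is an assumption.

// expirycheck.go: a minimal sketch of the validity-window test behind the
// "x509: certificate has expired or is not yet valid" failures logged above.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path: the webhook container mounts its serving cert at
	// /etc/webhook-cert/ (see the volumeMounts above); tls.crt is assumed.
	raw, err := os.ReadFile("/etc/webhook-cert/tls.crt")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(raw)
	if block == nil {
		log.Fatal("no PEM block in certificate file")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	// The same window comparison crypto/x509 applies during verification.
	now := time.Now()
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	default:
		fmt.Printf("valid until %s\n", cert.NotAfter.Format(time.RFC3339))
	}
}
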
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:04Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.876313 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.876675 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.876793 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.876887 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.876996 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:04Z","lastTransitionTime":"2025-12-03T19:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.980154 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.980215 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.980237 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.980270 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:04 crc kubenswrapper[4916]: I1203 19:31:04.980294 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:04Z","lastTransitionTime":"2025-12-03T19:31:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
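The failing records come from the kubelet status manager (status_manager.go:875): it PATCHes the status subresource of each pod, and the API server calls the pod.network-node-identity.openshift.io mutating webhook before admitting the write, which is where the expired certificate aborts the update. A sketch of the same kind of strategic-merge status patch issued through client-go follows; this is illustrative, not kubelet code, and the kubeconfig path and patch body are assumptions.

// statuspatch.go: a sketch of a strategic-merge PATCH against a pod's
// status subresource, the operation the status manager is retrying above.
package main

import (
	"context"
	"fmt"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Hypothetical kubeconfig path; the kubelet authenticates with its own
	// node credentials rather than a file like this.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig")
	if err != nil {
		log.Fatal(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	patch := []byte(`{"status":{"conditions":[{"type":"Ready","status":"True"}]}}`)
	_, err = client.CoreV1().Pods("openshift-network-node-identity").Patch(
		context.TODO(), "network-node-identity-vrzqb",
		types.StrategicMergePatchType, patch,
		metav1.PatchOptions{}, "status") // "status" targets the subresource
	if err != nil {
		// With the webhook cert expired, this fails exactly as logged:
		// Internal error occurred: failed calling webhook ...
		fmt.Println("patch rejected:", err)
		return
	}
	fmt.Println("status patched")
}
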
Has your network provider started?"} Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.083752 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.083803 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.083816 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.083835 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.083850 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:05Z","lastTransitionTime":"2025-12-03T19:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.187403 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.187493 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.187634 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.187679 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.187719 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:05Z","lastTransitionTime":"2025-12-03T19:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.291813 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.291872 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.291884 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.291907 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.291924 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:05Z","lastTransitionTime":"2025-12-03T19:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.395664 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.395736 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.395752 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.395780 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.395801 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:05Z","lastTransitionTime":"2025-12-03T19:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.498350 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.498423 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.498440 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.498463 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.498481 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:05Z","lastTransitionTime":"2025-12-03T19:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.601240 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.601277 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.601288 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.601304 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.601317 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:05Z","lastTransitionTime":"2025-12-03T19:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.703868 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.703933 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.703953 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.703978 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.703996 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:05Z","lastTransitionTime":"2025-12-03T19:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.807593 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.807655 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.807698 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.807719 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.811448 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:05Z","lastTransitionTime":"2025-12-03T19:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.914240 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.914888 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.915030 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.915152 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:05 crc kubenswrapper[4916]: I1203 19:31:05.915310 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:05Z","lastTransitionTime":"2025-12-03T19:31:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.018086 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.018154 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.018172 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.018198 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.018216 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:06Z","lastTransitionTime":"2025-12-03T19:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.121073 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.121135 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.121152 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.121176 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.121194 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:06Z","lastTransitionTime":"2025-12-03T19:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.225055 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.225117 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.225131 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.225150 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.225162 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:06Z","lastTransitionTime":"2025-12-03T19:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
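The Ready=False condition keeps repeating because the container runtime reports NetworkReady=false until a CNI network configuration appears in /etc/kubernetes/cni/net.d/, which the network provider writes once it is up. A sketch of that directory check follows, under the assumption that any *.conf, *.conflist, or *.json file counts as a loadable config.

// cnicheck.go: a sketch of the check implied by "no CNI configuration file
// in /etc/kubernetes/cni/net.d/": the runtime loads network configs from its
// conf dir and reports NetworkReady=false while none exists.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("cannot read conf dir:", err)
		os.Exit(1)
	}
	var found []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file in", confDir,
			"- has your network provider started?")
		os.Exit(1)
	}
	fmt.Println("CNI configs:", found)
}
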
Has your network provider started?"} Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.328523 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.328600 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.328617 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.328637 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.328650 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:06Z","lastTransitionTime":"2025-12-03T19:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.430974 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.431036 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.431058 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.431087 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.431105 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:06Z","lastTransitionTime":"2025-12-03T19:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.478023 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.478083 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:06 crc kubenswrapper[4916]: E1203 19:31:06.478223 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.478310 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.478348 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:06 crc kubenswrapper[4916]: E1203 19:31:06.478491 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:06 crc kubenswrapper[4916]: E1203 19:31:06.478650 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:06 crc kubenswrapper[4916]: E1203 19:31:06.478727 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.534068 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.534123 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.534140 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.534165 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.534181 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:06Z","lastTransitionTime":"2025-12-03T19:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.637908 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.637956 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.637966 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.637984 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.637995 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:06Z","lastTransitionTime":"2025-12-03T19:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.742021 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.742079 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.742092 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.742114 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.742127 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:06Z","lastTransitionTime":"2025-12-03T19:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.845485 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.845585 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.845600 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.845626 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.845643 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:06Z","lastTransitionTime":"2025-12-03T19:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.948811 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.948920 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.948941 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.948972 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:06 crc kubenswrapper[4916]: I1203 19:31:06.948992 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:06Z","lastTransitionTime":"2025-12-03T19:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.052711 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.052763 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.052779 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.052808 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.052827 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:07Z","lastTransitionTime":"2025-12-03T19:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.155531 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.155647 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.155670 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.155700 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.155723 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:07Z","lastTransitionTime":"2025-12-03T19:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.259352 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.259418 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.259438 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.259465 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.259485 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:07Z","lastTransitionTime":"2025-12-03T19:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.362412 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.362480 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.362493 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.362509 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.362522 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:07Z","lastTransitionTime":"2025-12-03T19:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.465087 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.465703 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.465730 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.465750 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.465765 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:07Z","lastTransitionTime":"2025-12-03T19:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.478421 4916 scope.go:117] "RemoveContainer" containerID="a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.568296 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.568915 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.568933 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.568993 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.569009 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:07Z","lastTransitionTime":"2025-12-03T19:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.672636 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.672757 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.672779 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.672834 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.672856 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:07Z","lastTransitionTime":"2025-12-03T19:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.775421 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.775460 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.775469 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.775484 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.775494 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:07Z","lastTransitionTime":"2025-12-03T19:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.878230 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.878303 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.878320 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.878347 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.878364 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:07Z","lastTransitionTime":"2025-12-03T19:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.980895 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.980936 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.980944 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.980959 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:07 crc kubenswrapper[4916]: I1203 19:31:07.980968 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:07Z","lastTransitionTime":"2025-12-03T19:31:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.084038 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.084103 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.084118 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.084137 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.084149 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:08Z","lastTransitionTime":"2025-12-03T19:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.186751 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.186801 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.186814 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.186877 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.186897 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:08Z","lastTransitionTime":"2025-12-03T19:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.290282 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.290350 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.290373 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.290402 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.290424 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:08Z","lastTransitionTime":"2025-12-03T19:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.370186 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.370289 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.370462 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.370497 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.370530 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.370553 4916 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.370502 4916 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.370661 4916 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.370689 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.370658028 +0000 UTC m=+148.333468334 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.370728 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.37070805 +0000 UTC m=+148.333518346 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.393978 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.394052 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.394075 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.394106 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.394130 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:08Z","lastTransitionTime":"2025-12-03T19:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.471186 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.471423 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.471375624 +0000 UTC m=+148.434185930 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.471520 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.471704 4916 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.471741 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.471788 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.471764444 +0000 UTC m=+148.434574710 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.472009 4916 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.472138 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.472112013 +0000 UTC m=+148.434922489 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.477225 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.477325 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.477329 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.477434 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.477588 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.477785 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.477933 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:08 crc kubenswrapper[4916]: E1203 19:31:08.478112 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.496406 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.496467 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.496484 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.496507 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.496524 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:08Z","lastTransitionTime":"2025-12-03T19:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.598867 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.598920 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.598934 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.598960 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.598974 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:08Z","lastTransitionTime":"2025-12-03T19:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.702375 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.702417 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.702428 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.702442 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.702453 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:08Z","lastTransitionTime":"2025-12-03T19:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.805649 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.805696 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.805712 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.805734 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.805750 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:08Z","lastTransitionTime":"2025-12-03T19:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.908594 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.908643 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.908654 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.908671 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.908683 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:08Z","lastTransitionTime":"2025-12-03T19:31:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.980307 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/2.log" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.983195 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213"} Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.983857 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:31:08 crc kubenswrapper[4916]: I1203 19:31:08.995851 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d3f11ad-2052-4a50-a66f-090baff4eec5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e751bd44282d50876324e02cf4f3eea3dbc28a7c3ba136fe3259fae0ce0098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:08Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.010939 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.011485 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.011539 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.011554 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 
19:31:09.011601 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.011621 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:09Z","lastTransitionTime":"2025-12-03T19:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.022819 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.053442 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:35Z\\\",\\\"message\\\":\\\"6554 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298738 6554 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-fcbx4 in node crc\\\\nI1203 19:30:35.298742 6554 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-fcbx4 after 0 failed attempt(s)\\\\nI1203 19:30:35.298747 6554 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298757 6554 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nF1203 19:30:35.298759 6554 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
v\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:31:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.068790 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.094467 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171
e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.107226 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347
c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.113873 4916 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.113914 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.113926 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.113944 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.113954 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:09Z","lastTransitionTime":"2025-12-03T19:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.121156 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.133922 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.150825 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"2025-12-03T19:30:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600\\\\n2025-12-03T19:30:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600 to /host/opt/cni/bin/\\\\n2025-12-03T19:30:06Z [verbose] multus-daemon started\\\\n2025-12-03T19:30:06Z [verbose] Readiness Indicator file check\\\\n2025-12-03T19:30:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.164722 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 
19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.182277 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.199155 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.216364 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.216403 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.216411 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.216424 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.216434 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:09Z","lastTransitionTime":"2025-12-03T19:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.220499 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\
\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.232448 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.249817 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.263850 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.280821 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.294885 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:09Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.319308 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.319355 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.319366 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.319383 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.319394 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:09Z","lastTransitionTime":"2025-12-03T19:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.422487 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.422527 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.422538 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.422555 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.422605 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:09Z","lastTransitionTime":"2025-12-03T19:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.525513 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.525617 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.525641 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.525672 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.525695 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:09Z","lastTransitionTime":"2025-12-03T19:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.629419 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.629467 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.629480 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.629497 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.629508 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:09Z","lastTransitionTime":"2025-12-03T19:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.732692 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.732776 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.732800 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.732839 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.732866 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:09Z","lastTransitionTime":"2025-12-03T19:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.839785 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.839870 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.839939 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.839978 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.840103 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:09Z","lastTransitionTime":"2025-12-03T19:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.944236 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.944288 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.944301 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.944323 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.944336 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:09Z","lastTransitionTime":"2025-12-03T19:31:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.991193 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/3.log" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.992722 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/2.log" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.995475 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" exitCode=1 Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.995513 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213"} Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.995564 4916 scope.go:117] "RemoveContainer" containerID="a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46" Dec 03 19:31:09 crc kubenswrapper[4916]: I1203 19:31:09.996771 4916 scope.go:117] "RemoveContainer" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" Dec 03 19:31:09 crc kubenswrapper[4916]: E1203 19:31:09.997041 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.016347 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"2025-12-03T19:30:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600\\\\n2025-12-03T19:30:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600 to /host/opt/cni/bin/\\\\n2025-12-03T19:30:06Z [verbose] multus-daemon started\\\\n2025-12-03T19:30:06Z [verbose] Readiness Indicator file check\\\\n2025-12-03T19:30:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.032986 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 
19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.047592 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.047629 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.047639 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.047672 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.047686 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:10Z","lastTransitionTime":"2025-12-03T19:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.060395 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.080018 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.096420 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.111875 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.124356 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.134587 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.148106 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe
105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.149674 4916 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.149699 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.149707 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.149719 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.149729 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:10Z","lastTransitionTime":"2025-12-03T19:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.160393 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.171928 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.184674 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.196296 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.206232 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.215722 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.225031 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d3f11ad-2052-4a50-a66f-090baff4eec5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e751bd44282d50876324e02cf4f3eea3dbc28a7c3ba136fe3259fae0ce0098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.236790 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.248242 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.252233 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.252289 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.252305 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.252329 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.252346 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:10Z","lastTransitionTime":"2025-12-03T19:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.273883 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0cc61413d07bb4b781c469053c2022b527dea7db49ca08057bc118953938a46\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:35Z\\\",\\\"message\\\":\\\"6554 obj_retry.go:365] Adding new object: *v1.Pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298738 6554 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-fcbx4 in node crc\\\\nI1203 19:30:35.298742 6554 obj_retry.go:386] Retry successful for *v1.Pod openshift-dns/node-resolver-fcbx4 after 0 failed attempt(s)\\\\nI1203 19:30:35.298747 6554 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-fcbx4\\\\nI1203 19:30:35.298757 6554 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nF1203 19:30:35.298759 6554 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
v\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:31:09Z\\\",\\\"message\\\":\\\"16029 6982 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc in node crc\\\\nI1203 19:31:09.416030 6982 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-q4hms after 0 failed attempt(s)\\\\nI1203 19:31:09.416045 6982 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-q4hms\\\\nI1203 19:31:09.416045 6982 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc after 0 failed attempt(s)\\\\nI1203 19:31:09.415939 6982 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 19:31:09.416057 6982 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc\\\\nI1203 19:31:09.416061 6982 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nF1203 19:31:09.415846 6982 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:31:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:10Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.355688 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.355747 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.355761 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.355781 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.355796 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:10Z","lastTransitionTime":"2025-12-03T19:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.457966 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.458012 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.458028 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.458050 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.458066 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:10Z","lastTransitionTime":"2025-12-03T19:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.478804 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.478878 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:10 crc kubenswrapper[4916]: E1203 19:31:10.478953 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:10 crc kubenswrapper[4916]: E1203 19:31:10.479063 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.479153 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:10 crc kubenswrapper[4916]: E1203 19:31:10.479258 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.479290 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:10 crc kubenswrapper[4916]: E1203 19:31:10.479364 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.561267 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.561308 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.561319 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.561336 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.561351 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:10Z","lastTransitionTime":"2025-12-03T19:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.663855 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.663924 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.663941 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.663967 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.663984 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:10Z","lastTransitionTime":"2025-12-03T19:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.766531 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.766636 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.766668 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.766697 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.766720 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:10Z","lastTransitionTime":"2025-12-03T19:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.869429 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.869531 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.869551 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.869603 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.869631 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:10Z","lastTransitionTime":"2025-12-03T19:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.972688 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.972738 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.972747 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.972763 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:10 crc kubenswrapper[4916]: I1203 19:31:10.972773 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:10Z","lastTransitionTime":"2025-12-03T19:31:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.001670 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/3.log" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.006471 4916 scope.go:117] "RemoveContainer" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" Dec 03 19:31:11 crc kubenswrapper[4916]: E1203 19:31:11.006630 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.019871 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.033207 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.057109 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.074084 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.077360 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.077415 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.077442 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.077489 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.077517 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.092508 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.107412 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container 
could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.126863 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.144383 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.164080 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d3f11ad-2052-4a50-a66f-090baff4eec5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e751bd44282d50876324e02cf4f3eea3dbc28a7c3ba136fe3259fae0ce0098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d
06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.180393 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.182437 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.182549 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.182608 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.182641 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.182663 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.195725 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.222185 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:31:09Z\\\",\\\"message\\\":\\\"16029 6982 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc in node crc\\\\nI1203 19:31:09.416030 6982 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-q4hms after 0 failed attempt(s)\\\\nI1203 19:31:09.416045 6982 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-q4hms\\\\nI1203 19:31:09.416045 6982 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc after 0 failed attempt(s)\\\\nI1203 19:31:09.415939 6982 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 19:31:09.416057 6982 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc\\\\nI1203 19:31:09.416061 6982 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nF1203 19:31:09.415846 6982 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:31:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s 
restarting failed container=ovnkube-controller pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.237408 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"
readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.263308 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etc
d/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\
\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.286136 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.286791 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.286832 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.286841 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.286857 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.286866 4916 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.306709 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.325457 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.348271 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"2025-12-03T19:30:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600\\\\n2025-12-03T19:30:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600 to /host/opt/cni/bin/\\\\n2025-12-03T19:30:06Z [verbose] multus-daemon started\\\\n2025-12-03T19:30:06Z [verbose] Readiness Indicator file check\\\\n2025-12-03T19:30:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.366118 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 
19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.390101 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.390188 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.390219 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.390250 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.390274 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.494316 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.494436 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.494465 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.494541 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.494595 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.597460 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.597560 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.597606 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.597629 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.597646 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.680482 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.680603 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.680629 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.680677 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.680704 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: E1203 19:31:11.704000 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.708816 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.708902 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.708953 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.708980 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.708996 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: E1203 19:31:11.724954 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.729977 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.730028 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.730045 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.730070 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.730086 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: E1203 19:31:11.751048 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.756724 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.756794 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.756820 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.756851 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.756874 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: E1203 19:31:11.774759 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.779670 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.779745 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.779767 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.779814 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.779840 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: E1203 19:31:11.799858 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:11Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:11 crc kubenswrapper[4916]: E1203 19:31:11.800190 4916 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.802219 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.802283 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.802301 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.802329 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.802347 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.905435 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.905492 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.905515 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.905535 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:11 crc kubenswrapper[4916]: I1203 19:31:11.905547 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:11Z","lastTransitionTime":"2025-12-03T19:31:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.022762 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.022831 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.022854 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.022884 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.022908 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:12Z","lastTransitionTime":"2025-12-03T19:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.125631 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.125688 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.125705 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.125730 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.125747 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:12Z","lastTransitionTime":"2025-12-03T19:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.228666 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.228746 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.228769 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.228801 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.228839 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:12Z","lastTransitionTime":"2025-12-03T19:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.331407 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.331465 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.331483 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.331513 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.331536 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:12Z","lastTransitionTime":"2025-12-03T19:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.434790 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.434871 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.434896 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.434931 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.434956 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:12Z","lastTransitionTime":"2025-12-03T19:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.477395 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.477464 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.477640 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:12 crc kubenswrapper[4916]: E1203 19:31:12.477645 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.477826 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:12 crc kubenswrapper[4916]: E1203 19:31:12.477991 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:12 crc kubenswrapper[4916]: E1203 19:31:12.478039 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
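The burst of "Error syncing pod" records above all share one root cause: the kubelet reports NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/, so no pod sandbox can be created. As a rough illustration (not the kubelet's actual code path), a minimal Python sketch of the same directory check follows; the accepted extensions are an assumption about what the runtime treats as a candidate CNI config:

    #!/usr/bin/env python3
    # Sketch: reproduce the "no CNI configuration file" complaint logged above
    # by checking /etc/kubernetes/cni/net.d/ for usable network configs.
    import glob
    import os

    CNI_CONF_DIR = "/etc/kubernetes/cni/net.d"       # directory named in the log
    PATTERNS = ("*.conf", "*.conflist", "*.json")    # assumed acceptable extensions

    def cni_configs(conf_dir: str = CNI_CONF_DIR) -> list[str]:
        """Return candidate CNI config files, sorted as runtimes typically pick them."""
        found: list[str] = []
        for pattern in PATTERNS:
            found.extend(glob.glob(os.path.join(conf_dir, pattern)))
        return sorted(found)

    if __name__ == "__main__":
        configs = cni_configs()
        if configs:
            print("NetworkReady candidate configs:", *configs, sep="\n  ")
        else:
            # Matches the state logged above: NetworkReady=false, NetworkPluginNotReady.
            print(f"no CNI configuration file in {CNI_CONF_DIR} -- network plugin not ready")

Until the network provider writes a config file into that directory, the kubelet keeps logging the NodeNotReady condition seen throughout this section.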
Dec 03 19:31:12 crc kubenswrapper[4916]: E1203 19:31:12.478127 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.538902 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.538977 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.539003 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.539034 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.539057 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:12Z","lastTransitionTime":"2025-12-03T19:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.642242 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.642317 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.642344 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.642374 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.642398 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:12Z","lastTransitionTime":"2025-12-03T19:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.745644 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.745706 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.745717 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.745737 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.745750 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:12Z","lastTransitionTime":"2025-12-03T19:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.849293 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.849350 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.849370 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.849395 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.849409 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:12Z","lastTransitionTime":"2025-12-03T19:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.952790 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.952846 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.952864 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.952888 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:12 crc kubenswrapper[4916]: I1203 19:31:12.952906 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:12Z","lastTransitionTime":"2025-12-03T19:31:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.055877 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.055944 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.055966 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.055997 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.056037 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:13Z","lastTransitionTime":"2025-12-03T19:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.159547 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.159643 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.159667 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.159693 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.159712 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:13Z","lastTransitionTime":"2025-12-03T19:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.262395 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.262530 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.262559 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.262626 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.262650 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:13Z","lastTransitionTime":"2025-12-03T19:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.365270 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.365341 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.365365 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.365410 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.365439 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:13Z","lastTransitionTime":"2025-12-03T19:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.468380 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.468444 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.468471 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.468501 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.468523 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:13Z","lastTransitionTime":"2025-12-03T19:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.570757 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.570831 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.570855 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.570888 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.570912 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:13Z","lastTransitionTime":"2025-12-03T19:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.674079 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.674163 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.674187 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.674220 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.674243 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:13Z","lastTransitionTime":"2025-12-03T19:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.776733 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.776802 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.776828 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.776920 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.776962 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:13Z","lastTransitionTime":"2025-12-03T19:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.880100 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.880140 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.880150 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.880164 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.880172 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:13Z","lastTransitionTime":"2025-12-03T19:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.983478 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.983547 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.983620 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.983658 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:13 crc kubenswrapper[4916]: I1203 19:31:13.983683 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:13Z","lastTransitionTime":"2025-12-03T19:31:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.087674 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.087744 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.087769 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.087801 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.087824 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:14Z","lastTransitionTime":"2025-12-03T19:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.190459 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.190524 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.190549 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.190614 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.190639 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:14Z","lastTransitionTime":"2025-12-03T19:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
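Alongside the CNI condition, every node-status patch above and the pod-status patch below fail with the same TLS error: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is serving a certificate that expired on 2025-08-24T17:21:41Z, well before the node's clock (2025-12-03). A hedged sketch of confirming that from the node follows; it assumes the third-party cryptography package is installed and that the endpoint completes a plain TLS handshake without demanding a client certificate:

    #!/usr/bin/env python3
    # Sketch: fetch the webhook's serving certificate without verification and
    # compare its validity window against the local clock, mirroring the x509
    # errors recorded in this log. Host and port come straight from the entries.
    import ssl
    from datetime import datetime, timezone

    from cryptography import x509  # third-party; assumed installed

    WEBHOOK_ADDR = ("127.0.0.1", 9743)

    def fetch_not_after(addr=WEBHOOK_ADDR) -> datetime:
        # get_server_certificate skips chain verification, so an expired
        # certificate is still retrievable (unlike the kubelet's POST above).
        pem = ssl.get_server_certificate(addr)
        cert = x509.load_pem_x509_certificate(pem.encode())
        return cert.not_valid_after_utc  # cryptography >= 42; older: not_valid_after

    if __name__ == "__main__":
        not_after = fetch_not_after()
        now = datetime.now(timezone.utc)
        status = "EXPIRED" if now > not_after else "valid"
        print(f"webhook cert notAfter={not_after.isoformat()} now={now.isoformat()} -> {status}")

Either rotating the webhook's serving certificate or correcting a skewed node clock should clear these patch failures; until then the kubelet exhausts its retries ("update node status exceeds retry count") on every status-update cycle.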
Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.293060 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.293125 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.293142 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.293169 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.293186 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:14Z","lastTransitionTime":"2025-12-03T19:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.396719 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.396786 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.396809 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.396839 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.396875 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:14Z","lastTransitionTime":"2025-12-03T19:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.477272 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.477348 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:14 crc kubenswrapper[4916]: E1203 19:31:14.477413 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.477547 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:14 crc kubenswrapper[4916]: E1203 19:31:14.477724 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.477783 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:14 crc kubenswrapper[4916]: E1203 19:31:14.477882 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:14 crc kubenswrapper[4916]: E1203 19:31:14.478072 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.496977 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.499431 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.499486 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.499503 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.499524 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.499544 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:14Z","lastTransitionTime":"2025-12-03T19:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.520223 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.537054 4916 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.561277 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.602715 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.602765 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.602780 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.602802 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.602818 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:14Z","lastTransitionTime":"2025-12-03T19:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.604634 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.621525 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.633144 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.644814 4916 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.656240 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.667044 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.689904 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:31:09Z\\\",\\\"message\\\":\\\"16029 6982 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc in node crc\\\\nI1203 19:31:09.416030 6982 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-q4hms after 0 failed attempt(s)\\\\nI1203 19:31:09.416045 6982 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-q4hms\\\\nI1203 19:31:09.416045 6982 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc after 0 failed attempt(s)\\\\nI1203 19:31:09.415939 6982 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 19:31:09.416057 6982 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc\\\\nI1203 19:31:09.416061 6982 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nF1203 19:31:09.415846 6982 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:31:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.699633 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.704965 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.704996 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.705007 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.705021 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.705031 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:14Z","lastTransitionTime":"2025-12-03T19:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.710245 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d3f11ad-2052-4a50-a66f-090baff4eec5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e751bd44282d50876324e02cf4f3eea3dbc28a7c3ba136fe3259fae0ce0098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.722979 4916 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f99
7d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.734446 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.745319 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.760025 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"2025-12-03T19:30:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600\\\\n2025-12-03T19:30:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600 to /host/opt/cni/bin/\\\\n2025-12-03T19:30:06Z [verbose] multus-daemon started\\\\n2025-12-03T19:30:06Z [verbose] Readiness Indicator file check\\\\n2025-12-03T19:30:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.772255 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 
19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.790065 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:14Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.808297 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.808368 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.808394 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.808426 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.808454 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:14Z","lastTransitionTime":"2025-12-03T19:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.911427 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.911470 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.911484 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.911502 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:14 crc kubenswrapper[4916]: I1203 19:31:14.911514 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:14Z","lastTransitionTime":"2025-12-03T19:31:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.014670 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.014723 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.014735 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.014754 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.014766 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:15Z","lastTransitionTime":"2025-12-03T19:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.118210 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.118258 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.118270 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.118291 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.118304 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:15Z","lastTransitionTime":"2025-12-03T19:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.221357 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.221399 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.221408 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.221446 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.221459 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:15Z","lastTransitionTime":"2025-12-03T19:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.324723 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.324790 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.324814 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.324843 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.324863 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:15Z","lastTransitionTime":"2025-12-03T19:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.427316 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.427353 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.427362 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.427376 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.427386 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:15Z","lastTransitionTime":"2025-12-03T19:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.530471 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.530509 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.530518 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.530531 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.530540 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:15Z","lastTransitionTime":"2025-12-03T19:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.633496 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.633604 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.633627 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.633660 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.633726 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:15Z","lastTransitionTime":"2025-12-03T19:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.737271 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.737318 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.737330 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.737348 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.737360 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:15Z","lastTransitionTime":"2025-12-03T19:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.839489 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.839559 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.839591 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.839629 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.839652 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:15Z","lastTransitionTime":"2025-12-03T19:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.942767 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.942829 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.942842 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.942864 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:15 crc kubenswrapper[4916]: I1203 19:31:15.942878 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:15Z","lastTransitionTime":"2025-12-03T19:31:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.045494 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.045557 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.045589 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.045610 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.045623 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:16Z","lastTransitionTime":"2025-12-03T19:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.148386 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.148442 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.148453 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.148474 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.148492 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:16Z","lastTransitionTime":"2025-12-03T19:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.251079 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.251124 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.251135 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.251154 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.251168 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:16Z","lastTransitionTime":"2025-12-03T19:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.355182 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.355300 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.355320 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.355349 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.355368 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:16Z","lastTransitionTime":"2025-12-03T19:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.458374 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.458416 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.458438 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.458461 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.458483 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:16Z","lastTransitionTime":"2025-12-03T19:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.477371 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.477471 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.477540 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.477385 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:16 crc kubenswrapper[4916]: E1203 19:31:16.477690 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:16 crc kubenswrapper[4916]: E1203 19:31:16.477870 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:16 crc kubenswrapper[4916]: E1203 19:31:16.477978 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:16 crc kubenswrapper[4916]: E1203 19:31:16.478066 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.560988 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.561067 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.561078 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.561102 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.561114 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:16Z","lastTransitionTime":"2025-12-03T19:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.663961 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.664014 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.664044 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.664063 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.664073 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:16Z","lastTransitionTime":"2025-12-03T19:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.767117 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.767168 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.767180 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.767195 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.767205 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:16Z","lastTransitionTime":"2025-12-03T19:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.869833 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.869878 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.869889 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.869908 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.869919 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:16Z","lastTransitionTime":"2025-12-03T19:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.973707 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.973785 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.973796 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.973815 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:16 crc kubenswrapper[4916]: I1203 19:31:16.973827 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:16Z","lastTransitionTime":"2025-12-03T19:31:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.076382 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.076432 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.076445 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.076459 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.076469 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:17Z","lastTransitionTime":"2025-12-03T19:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.178923 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.178996 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.179009 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.179035 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.179059 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:17Z","lastTransitionTime":"2025-12-03T19:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.281818 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.281866 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.281880 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.281898 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.281910 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:17Z","lastTransitionTime":"2025-12-03T19:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.384614 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.384653 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.384664 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.384677 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.384685 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:17Z","lastTransitionTime":"2025-12-03T19:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.491446 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.491501 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.491510 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.491533 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.491545 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:17Z","lastTransitionTime":"2025-12-03T19:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.595974 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.596062 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.596081 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.596106 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.596124 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:17Z","lastTransitionTime":"2025-12-03T19:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.699545 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.699637 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.699655 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.699679 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.699693 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:17Z","lastTransitionTime":"2025-12-03T19:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.803399 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.803510 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.803535 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.803594 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:17 crc kubenswrapper[4916]: I1203 19:31:17.803622 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:17Z","lastTransitionTime":"2025-12-03T19:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.294737 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.294807 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.294827 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.294853 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.294870 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:18Z","lastTransitionTime":"2025-12-03T19:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.397897 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.397971 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.397994 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.398024 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.398047 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:18Z","lastTransitionTime":"2025-12-03T19:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.477430 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.477657 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 19:31:18 crc kubenswrapper[4916]: E1203 19:31:18.477669 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 19:31:18 crc kubenswrapper[4916]: E1203 19:31:18.477796 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.477791 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.477901 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 19:31:18 crc kubenswrapper[4916]: E1203 19:31:18.478239 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 19:31:18 crc kubenswrapper[4916]: E1203 19:31:18.478418 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.500950 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.501017 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.501035 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.501061 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.501080 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:18Z","lastTransitionTime":"2025-12-03T19:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.603848 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.603907 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.603923 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.603948 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.603967 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:18Z","lastTransitionTime":"2025-12-03T19:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.708670 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.708716 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.708727 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.708746 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.708760 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:18Z","lastTransitionTime":"2025-12-03T19:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.812313 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.812390 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.812413 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.812451 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.812479 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:18Z","lastTransitionTime":"2025-12-03T19:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.916117 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.916156 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.916168 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.916185 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:18 crc kubenswrapper[4916]: I1203 19:31:18.916196 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:18Z","lastTransitionTime":"2025-12-03T19:31:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.018604 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.018652 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.018664 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.018680 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.018691 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:19Z","lastTransitionTime":"2025-12-03T19:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.120992 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.121049 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.121060 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.121077 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.121090 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:19Z","lastTransitionTime":"2025-12-03T19:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.223369 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.223494 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.223506 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.223524 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.223538 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:19Z","lastTransitionTime":"2025-12-03T19:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.325877 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.325929 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.325943 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.325963 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.325976 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:19Z","lastTransitionTime":"2025-12-03T19:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.429100 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.429166 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.429179 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.429198 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.429211 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:19Z","lastTransitionTime":"2025-12-03T19:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.530998 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.531048 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.531059 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.531075 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.531089 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:19Z","lastTransitionTime":"2025-12-03T19:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.634737 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.635113 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.635125 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.635144 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.635157 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:19Z","lastTransitionTime":"2025-12-03T19:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.738267 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.738334 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.738347 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.738362 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.738373 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:19Z","lastTransitionTime":"2025-12-03T19:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.841613 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.841667 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.841679 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.841696 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.841707 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:19Z","lastTransitionTime":"2025-12-03T19:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.943996 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.944048 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.944063 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.944085 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:19 crc kubenswrapper[4916]: I1203 19:31:19.944099 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:19Z","lastTransitionTime":"2025-12-03T19:31:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.047833 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.047923 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.047952 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.048001 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.048028 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:20Z","lastTransitionTime":"2025-12-03T19:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.152128 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.152196 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.152213 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.152236 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.152253 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:20Z","lastTransitionTime":"2025-12-03T19:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.254443 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.254493 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.254505 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.254522 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.254533 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:20Z","lastTransitionTime":"2025-12-03T19:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.357004 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.357050 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.357064 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.357080 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.357092 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:20Z","lastTransitionTime":"2025-12-03T19:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.459935 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.460024 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.460050 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.460084 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.460106 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:20Z","lastTransitionTime":"2025-12-03T19:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.477785 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.477839 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.477906 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.478006 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw"
Dec 03 19:31:20 crc kubenswrapper[4916]: E1203 19:31:20.478012 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 19:31:20 crc kubenswrapper[4916]: E1203 19:31:20.478146 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 19:31:20 crc kubenswrapper[4916]: E1203 19:31:20.478213 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461"
Dec 03 19:31:20 crc kubenswrapper[4916]: E1203 19:31:20.478322 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.562894 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.562930 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.562938 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.562952 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.562961 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:20Z","lastTransitionTime":"2025-12-03T19:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.665850 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.665911 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.665934 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.665956 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.665969 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:20Z","lastTransitionTime":"2025-12-03T19:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.769437 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.769499 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.769517 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.769542 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.769600 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:20Z","lastTransitionTime":"2025-12-03T19:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.872813 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.872865 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.872878 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.872900 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.872915 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:20Z","lastTransitionTime":"2025-12-03T19:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.975423 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.975501 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.975524 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.975556 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:20 crc kubenswrapper[4916]: I1203 19:31:20.975622 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:20Z","lastTransitionTime":"2025-12-03T19:31:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.078341 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.078398 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.078413 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.078435 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.078452 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:21Z","lastTransitionTime":"2025-12-03T19:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.181172 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.181221 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.181232 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.181250 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.181264 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:21Z","lastTransitionTime":"2025-12-03T19:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.284654 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.284782 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.284794 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.284809 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.284819 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:21Z","lastTransitionTime":"2025-12-03T19:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.388155 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.388200 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.388208 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.388222 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.388231 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:21Z","lastTransitionTime":"2025-12-03T19:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.490750 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.490779 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.490790 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.490809 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.490833 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:21Z","lastTransitionTime":"2025-12-03T19:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.593024 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.593110 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.593134 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.593167 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.593193 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:21Z","lastTransitionTime":"2025-12-03T19:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.696504 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.696601 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.696624 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.696652 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.696672 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:21Z","lastTransitionTime":"2025-12-03T19:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.800469 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.801338 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.801521 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.801750 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.801897 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:21Z","lastTransitionTime":"2025-12-03T19:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.905636 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.906507 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.906717 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.906953 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:21 crc kubenswrapper[4916]: I1203 19:31:21.907237 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:21Z","lastTransitionTime":"2025-12-03T19:31:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.010345 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.010405 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.010421 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.010449 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.010484 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.073448 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.073513 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.073525 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.073545 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.073558 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.091013 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:22Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.096134 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.096350 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.096601 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.096804 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.097001 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.117917 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:22Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.122845 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.122897 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.122914 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.122937 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.122954 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.143803 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:22Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.148353 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.148404 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.148416 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.148432 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.148444 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.162880 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:22Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.167553 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.167604 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.167615 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.167630 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.167640 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.183879 4916 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T19:31:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"41f46b78-490e-42ba-85e2-5e59d1446fea\\\",\\\"systemUUID\\\":\\\"9ad0a84f-4753-46e4-af5b-66c9c8ab0a3f\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:22Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.184061 4916 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.186203 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.186294 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.186308 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.186330 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.186345 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.290148 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.290234 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.290246 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.290264 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.290275 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.393359 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.393684 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.393812 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.393937 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.394045 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.478065 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.478129 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.478170 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.478202 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.478879 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.478954 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.478970 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.478979 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.496666 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.496701 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.496709 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.496723 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.496733 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.599694 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.599780 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.599801 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.599828 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.599845 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.702540 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.702672 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.702697 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.702729 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.702756 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.806676 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.806733 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.806745 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.806764 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.806777 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.909999 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.910091 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.910106 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.910130 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.910146 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:22Z","lastTransitionTime":"2025-12-03T19:31:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:22 crc kubenswrapper[4916]: I1203 19:31:22.961826 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.962031 4916 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:31:22 crc kubenswrapper[4916]: E1203 19:31:22.962154 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs podName:9ae5584e-d1d9-4aa9-955a-41bdf15f0461 nodeName:}" failed. No retries permitted until 2025-12-03 19:32:26.962126018 +0000 UTC m=+162.924936334 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs") pod "network-metrics-daemon-kbxgw" (UID: "9ae5584e-d1d9-4aa9-955a-41bdf15f0461") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.013748 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.013815 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.013824 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.013852 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.013863 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:23Z","lastTransitionTime":"2025-12-03T19:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.116683 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.116730 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.116742 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.116757 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.116769 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:23Z","lastTransitionTime":"2025-12-03T19:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.220163 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.220237 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.220256 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.220286 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.220304 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:23Z","lastTransitionTime":"2025-12-03T19:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.323291 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.323361 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.323373 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.323394 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.323409 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:23Z","lastTransitionTime":"2025-12-03T19:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.427040 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.427143 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.427171 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.427209 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.427240 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:23Z","lastTransitionTime":"2025-12-03T19:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.530921 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.531034 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.531708 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.531833 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.531848 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:23Z","lastTransitionTime":"2025-12-03T19:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.635706 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.635761 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.635778 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.635804 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.635819 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:23Z","lastTransitionTime":"2025-12-03T19:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.739473 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.739542 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.739616 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.739657 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.739685 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:23Z","lastTransitionTime":"2025-12-03T19:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.843315 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.843929 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.844079 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.844224 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.844370 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:23Z","lastTransitionTime":"2025-12-03T19:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.948403 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.948473 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.948490 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.948516 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:23 crc kubenswrapper[4916]: I1203 19:31:23.948533 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:23Z","lastTransitionTime":"2025-12-03T19:31:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.052885 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.053075 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.053145 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.053188 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.053213 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:24Z","lastTransitionTime":"2025-12-03T19:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.157002 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.157093 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.157120 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.157149 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.157168 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:24Z","lastTransitionTime":"2025-12-03T19:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.261080 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.261156 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.261183 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.261215 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.261239 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:24Z","lastTransitionTime":"2025-12-03T19:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.364399 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.364483 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.364507 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.364539 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.364596 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:24Z","lastTransitionTime":"2025-12-03T19:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.466968 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.467051 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.467075 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.467104 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.467123 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:24Z","lastTransitionTime":"2025-12-03T19:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.478035 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.478164 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.479104 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.479279 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:24 crc kubenswrapper[4916]: E1203 19:31:24.479471 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:24 crc kubenswrapper[4916]: E1203 19:31:24.479674 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:24 crc kubenswrapper[4916]: E1203 19:31:24.479869 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:24 crc kubenswrapper[4916]: E1203 19:31:24.480965 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.481368 4916 scope.go:117] "RemoveContainer" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" Dec 03 19:31:24 crc kubenswrapper[4916]: E1203 19:31:24.481813 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.496300 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d3f11ad-2052-4a50-a66f-090baff4eec5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89e751bd44282d50876324e02cf4f3eea3dbc28a7c3ba136fe3259fae0ce0098\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9474169cca35c37a78d0ce3e33473530a72318b20d3e9091bba2396248bfb21d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.521487 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634d62bec6d3e1ca233d1da249063d872e57fb5166bfd1c7d721fd10877158be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.538521 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-fcbx4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1aa2c320-e4a7-4032-a519-e36ba11108e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a2066b293f47eeaf15783b86e6197e4dee357577f561e2503409d78c292ed0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tkkqp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-fcbx4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.562502 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:31:09Z\\\",\\\"message\\\":\\\"16029 6982 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc in node crc\\\\nI1203 19:31:09.416030 6982 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-q4hms after 0 failed attempt(s)\\\\nI1203 19:31:09.416045 6982 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/machine-config-daemon-q4hms\\\\nI1203 19:31:09.416045 6982 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc after 0 failed attempt(s)\\\\nI1203 19:31:09.415939 6982 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1203 19:31:09.416057 6982 default_network_controller.go:776] Recording success event on pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc\\\\nI1203 19:31:09.416061 6982 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nF1203 19:31:09.415846 6982 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:31:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pc7lq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:05Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c9jfr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.570062 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.570418 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.570658 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.570780 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.570917 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:24Z","lastTransitionTime":"2025-12-03T19:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.577384 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-tpt4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d06fba7e-f02e-4eee-8907-405a69b5f7d5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b815ffb622b13c5077f42fb63701365653ea553422489cd84ec320cb9a3b74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s5hql\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-tpt4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.601745 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50f00718-e44e-4509-8e54-0e964ed7da3f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://274cbaa457a5e1458dd3f0eab3f1556b640745e999899e4270e2eba9485bec73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://984dcbc7933e0f4491ff20c08ca9856ec9d763f6a4d05e2b23c45656be58382c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1729fba46b6e3b5016032c3d575862a7c5010ec7646cd733db3f28ee84ee2080\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1a48adff06239ddfe7fa58a64f999b94c8171
e01ea0819336a48b097936087\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b52af016a9e290f99022ee9e6bf095b78bc0f1b281f5aa39fbf7a548eb47e560\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b5acfeb9a3b7899a6ab702d4800bc63614778253bc166c557017000aa4bc61\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40a930cb3a7289fcc2b7739957c62d9b084f9374af834b5fd4b20c865d2b47fd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02210ff00f932f19c6835e94d888d8ca39b067bd76b5b1502fdee595de64c5e1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.619225 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2eb3523e-d42c-4762-ab12-8b5fdfeb3a47\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3282be15d2d67d4891bc28b34436d347
c28af010d5f19d5132802205eb158e73\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"le observer\\\\nW1203 19:30:03.746892 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1203 19:30:03.746985 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1203 19:30:03.749003 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2470302590/tls.crt::/tmp/serving-cert-2470302590/tls.key\\\\\\\"\\\\nI1203 19:30:04.013813 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1203 19:30:04.032115 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1203 19:30:04.032145 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1203 19:30:04.032174 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1203 19:30:04.032181 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1203 19:30:04.044537 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1203 19:30:04.044588 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044597 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1203 19:30:04.044606 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1203 19:30:04.044609 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1203 19:30:04.044628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1203 19:30:04.044631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1203 19:30:04.044791 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1203 19:30:04.046087 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.636879 4916 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6fee5feb396c545d674b80d5d55d4dbbec4c89ff31b29965681c624515a22f1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d02aeaa865f4c376b2dbcb1650cf76028b7e058e4ccc6d9ebb8d8ed8dd119923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.654402 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.671951 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4vkgz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T19:30:51Z\\\",\\\"message\\\":\\\"2025-12-03T19:30:06+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600\\\\n2025-12-03T19:30:06+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_be78c459-2f5a-4749-a3ed-e4b664137600 to /host/opt/cni/bin/\\\\n2025-12-03T19:30:06Z [verbose] multus-daemon started\\\\n2025-12-03T19:30:06Z [verbose] Readiness Indicator file check\\\\n2025-12-03T19:30:51Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-22dsk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4vkgz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.673802 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.673854 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.673869 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.673884 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.673896 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:24Z","lastTransitionTime":"2025-12-03T19:31:24Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.684072 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5504feb0-62e6-45d1-8ca0-e7541ec0269f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f5ed6ed6ecabaa466f093242b5babcccfd38e58a7f78a3aaa2fbcf008b0eeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00a1f44b37ee770bbf477e63a233c1d41664c9ba81c1c3665702c085022f1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xf2dw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-
12-03T19:30:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9mxpz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.699584 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f140fd9b-556c-4bba-aaab-b51ad89e7e3d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4df64b1b2532f47f348dca1d97baf589392d796fda9634de74aaf1a55735995\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13c3997382b2811f3b38d0d6f8baea6dc1e6a0a00bf927852a9a696571e2060d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed0
8287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c37517d7e213b58df423c0fa726b9e136a9a46682e6a252ffc8c8a5aa3ded6bf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.715991 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df9440ee302c90a2fe2ace869c1fb5d7e097ce7c2a39686e87be84f4b9bc6b16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.733904 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a1fc2b9-c813-42d4-badd-f1f81e57b667\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2787b9e5820cfb870f10b4f8a00a210f01fb93a813cdb9a8ffcf17c5841bf233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://220c61e8b46228a99057f7e75010cdffc971c1ade210f32291d2f346258f212a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8abe105fa63e55db8b9a8d6681b5fedcbdde553f7059f0cf7bc9f84dceb18a01\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bc00a801810d8da1b595aedbbe9792ba0d823601d87d499471fcfd1304708c78\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://28d37ed268f80865120ae9368e13d473f0a54d5b9ed8a1b943dcd429d7410b09\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://51037188cfbec8153ece6996e7b22ab8e56ed47ecb92f00cc510c1423bd28838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0327b6617adf15a50096c8add025fa0a6a5a93e8f64f93ada68388bf9fb88d14\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:30:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:30:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cwshh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-kp7gb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.747476 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-65df2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-kbxgw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.760995 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4eeded2b-f2a3-47ef-97dd-4a6c7c6a5921\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:29:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://68e0f9c119d251919b3ef752cad9dca779dda56ffc55e50148baf9f022904a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b90a37e790bde7ac8549324ef931b1a5c8e04ee5b677d8b622cfc001c99d9ebd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f862079a3b76dbaeef41d78290aa03eebf50ce3a569cf80b13ad2b79f6d058f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:29:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c4328d2f0da13f50748af8e134324f8606ddf362e49390354efe7f541918604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T19:29:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T19:29:45Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:29:44Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.777286 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.777328 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.777340 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.777359 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.777373 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:24Z","lastTransitionTime":"2025-12-03T19:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.777626 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.790656 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.803772 4916 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T19:30:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fd47f46ad15e6df9706c2a318f38d6e3dabf382a5e0870f4825bdb7cd5a42a6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T19:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pzj2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T19:30:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q4hms\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T19:31:24Z is after 2025-08-24T17:21:41Z" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.881798 4916 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.881862 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.881876 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.881899 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.881915 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:24Z","lastTransitionTime":"2025-12-03T19:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.985940 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.985987 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.986001 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.986024 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:24 crc kubenswrapper[4916]: I1203 19:31:24.986041 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:24Z","lastTransitionTime":"2025-12-03T19:31:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.090886 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.090953 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.090977 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.091004 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.091028 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:25Z","lastTransitionTime":"2025-12-03T19:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.193112 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.193197 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.193209 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.193222 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.193231 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:25Z","lastTransitionTime":"2025-12-03T19:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.295550 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.295685 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.295703 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.295726 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.295741 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:25Z","lastTransitionTime":"2025-12-03T19:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.399721 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.399819 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.399832 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.399852 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.399865 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:25Z","lastTransitionTime":"2025-12-03T19:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.502968 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.503036 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.503056 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.503088 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.503109 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:25Z","lastTransitionTime":"2025-12-03T19:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.606079 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.606122 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.606130 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.606145 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.606158 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:25Z","lastTransitionTime":"2025-12-03T19:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.709198 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.709255 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.709266 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.709285 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.709297 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:25Z","lastTransitionTime":"2025-12-03T19:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.812358 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.812455 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.812473 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.812498 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.812513 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:25Z","lastTransitionTime":"2025-12-03T19:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.915808 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.915888 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.915913 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.915947 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:25 crc kubenswrapper[4916]: I1203 19:31:25.915973 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:25Z","lastTransitionTime":"2025-12-03T19:31:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.018637 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.018692 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.018705 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.018731 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.018745 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:26Z","lastTransitionTime":"2025-12-03T19:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.122166 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.122215 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.122226 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.122246 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.122255 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:26Z","lastTransitionTime":"2025-12-03T19:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.225426 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.225487 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.225497 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.225518 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.225534 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:26Z","lastTransitionTime":"2025-12-03T19:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.328416 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.328492 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.328506 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.328533 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.328549 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:26Z","lastTransitionTime":"2025-12-03T19:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.431913 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.431987 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.432003 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.432028 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.432041 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:26Z","lastTransitionTime":"2025-12-03T19:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.477446 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.477548 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.477455 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.477720 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:26 crc kubenswrapper[4916]: E1203 19:31:26.477726 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:26 crc kubenswrapper[4916]: E1203 19:31:26.477851 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:26 crc kubenswrapper[4916]: E1203 19:31:26.478013 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:26 crc kubenswrapper[4916]: E1203 19:31:26.478146 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.534765 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.534811 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.534822 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.534841 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.534852 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:26Z","lastTransitionTime":"2025-12-03T19:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.637386 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.637518 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.637528 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.637542 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.637552 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:26Z","lastTransitionTime":"2025-12-03T19:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.741025 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.741089 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.741100 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.741117 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.741130 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:26Z","lastTransitionTime":"2025-12-03T19:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.844517 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.844676 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.844712 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.844759 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.844788 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:26Z","lastTransitionTime":"2025-12-03T19:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.947899 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.947946 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.947955 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.947972 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:26 crc kubenswrapper[4916]: I1203 19:31:26.947984 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:26Z","lastTransitionTime":"2025-12-03T19:31:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.051816 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.051884 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.051898 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.051923 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.051940 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:27Z","lastTransitionTime":"2025-12-03T19:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.155136 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.155193 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.155203 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.155224 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.155237 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:27Z","lastTransitionTime":"2025-12-03T19:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.259040 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.259101 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.259120 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.259144 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.259162 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:27Z","lastTransitionTime":"2025-12-03T19:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.362317 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.362401 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.362426 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.362459 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.362481 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:27Z","lastTransitionTime":"2025-12-03T19:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.475625 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.475689 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.475700 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.475718 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.475728 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:27Z","lastTransitionTime":"2025-12-03T19:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.579766 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.579856 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.579870 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.579893 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.579909 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:27Z","lastTransitionTime":"2025-12-03T19:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.683412 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.683494 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.683517 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.683543 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.683613 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:27Z","lastTransitionTime":"2025-12-03T19:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.788166 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.788419 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.788448 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.788483 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.788508 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:27Z","lastTransitionTime":"2025-12-03T19:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.891344 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.891417 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.891435 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.891462 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.891479 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:27Z","lastTransitionTime":"2025-12-03T19:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.994837 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.994883 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.994902 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.994918 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:27 crc kubenswrapper[4916]: I1203 19:31:27.994929 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:27Z","lastTransitionTime":"2025-12-03T19:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.098173 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.098302 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.098321 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.098347 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.098367 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:28Z","lastTransitionTime":"2025-12-03T19:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.202093 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.202196 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.202225 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.202260 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.202284 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:28Z","lastTransitionTime":"2025-12-03T19:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.305203 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.305271 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.305289 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.305316 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.305334 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:28Z","lastTransitionTime":"2025-12-03T19:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.408888 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.408960 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.408983 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.409012 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.409033 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:28Z","lastTransitionTime":"2025-12-03T19:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.477483 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.477550 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.477641 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:28 crc kubenswrapper[4916]: E1203 19:31:28.477845 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.477879 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:28 crc kubenswrapper[4916]: E1203 19:31:28.478027 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:28 crc kubenswrapper[4916]: E1203 19:31:28.478500 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:28 crc kubenswrapper[4916]: E1203 19:31:28.478667 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.512645 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.512702 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.512713 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.512736 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.512749 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:28Z","lastTransitionTime":"2025-12-03T19:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.616855 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.616921 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.616942 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.616968 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.616982 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:28Z","lastTransitionTime":"2025-12-03T19:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.719411 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.719530 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.719552 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.719604 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.719626 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:28Z","lastTransitionTime":"2025-12-03T19:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.821765 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.821804 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.821817 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.821833 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.821845 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:28Z","lastTransitionTime":"2025-12-03T19:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.924555 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.924648 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.924662 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.924679 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:28 crc kubenswrapper[4916]: I1203 19:31:28.924693 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:28Z","lastTransitionTime":"2025-12-03T19:31:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.026913 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.026969 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.026978 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.026995 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.027012 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:29Z","lastTransitionTime":"2025-12-03T19:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.129822 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.129856 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.129865 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.129878 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.129888 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:29Z","lastTransitionTime":"2025-12-03T19:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.233095 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.233126 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.233136 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.233163 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.233175 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:29Z","lastTransitionTime":"2025-12-03T19:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.335711 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.335771 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.335793 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.335819 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.335840 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:29Z","lastTransitionTime":"2025-12-03T19:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.440419 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.440488 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.440510 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.440539 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.440561 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:29Z","lastTransitionTime":"2025-12-03T19:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.543745 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.543805 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.543817 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.543838 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.543850 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:29Z","lastTransitionTime":"2025-12-03T19:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.645968 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.646075 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.646196 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.646239 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.646305 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:29Z","lastTransitionTime":"2025-12-03T19:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.751465 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.751526 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.751535 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.751558 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.751589 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:29Z","lastTransitionTime":"2025-12-03T19:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.854058 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.854108 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.854117 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.854130 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.854139 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:29Z","lastTransitionTime":"2025-12-03T19:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.956824 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.956895 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.956912 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.956936 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:29 crc kubenswrapper[4916]: I1203 19:31:29.956955 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:29Z","lastTransitionTime":"2025-12-03T19:31:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.059792 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.059857 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.059882 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.059911 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.059933 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:30Z","lastTransitionTime":"2025-12-03T19:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.162835 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.162918 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.162944 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.162976 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.162999 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:30Z","lastTransitionTime":"2025-12-03T19:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.266425 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.266601 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.266629 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.266652 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.266704 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:30Z","lastTransitionTime":"2025-12-03T19:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.371362 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.371445 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.371471 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.371507 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.371545 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:30Z","lastTransitionTime":"2025-12-03T19:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.475767 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.475816 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.475828 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.475846 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.475860 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:30Z","lastTransitionTime":"2025-12-03T19:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.477165 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:30 crc kubenswrapper[4916]: E1203 19:31:30.477279 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.477308 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.477328 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.477752 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:30 crc kubenswrapper[4916]: E1203 19:31:30.477820 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:30 crc kubenswrapper[4916]: E1203 19:31:30.477966 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:30 crc kubenswrapper[4916]: E1203 19:31:30.478090 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.580625 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.580718 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.580743 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.580774 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.580797 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:30Z","lastTransitionTime":"2025-12-03T19:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.684765 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.684826 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.684844 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.684870 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.684889 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:30Z","lastTransitionTime":"2025-12-03T19:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.787414 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.787479 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.787496 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.787519 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.787535 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:30Z","lastTransitionTime":"2025-12-03T19:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.890476 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.890547 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.890611 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.890641 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.890662 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:30Z","lastTransitionTime":"2025-12-03T19:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.993301 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.993343 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.993357 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.993379 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 19:31:30 crc kubenswrapper[4916]: I1203 19:31:30.993395 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:30Z","lastTransitionTime":"2025-12-03T19:31:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.096495 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.096621 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.096658 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.096691 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.096714 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:31Z","lastTransitionTime":"2025-12-03T19:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.199424 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.199472 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.199483 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.199501 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.199512 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:31Z","lastTransitionTime":"2025-12-03T19:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.302520 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.302581 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.302593 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.302609 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.302621 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:31Z","lastTransitionTime":"2025-12-03T19:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.405358 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.405436 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.405458 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.405487 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.405510 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:31Z","lastTransitionTime":"2025-12-03T19:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.508031 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.508078 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.508094 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.508117 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.508133 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:31Z","lastTransitionTime":"2025-12-03T19:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.610892 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.610962 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.610986 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.611016 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.611041 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:31Z","lastTransitionTime":"2025-12-03T19:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.714560 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.714653 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.714672 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.714696 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.714713 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:31Z","lastTransitionTime":"2025-12-03T19:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.817664 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.817733 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.817751 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.817777 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.817797 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:31Z","lastTransitionTime":"2025-12-03T19:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.920799 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.920851 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.920869 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.920890 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:31 crc kubenswrapper[4916]: I1203 19:31:31.920908 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:31Z","lastTransitionTime":"2025-12-03T19:31:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.024459 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.024527 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.024545 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.024605 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.024623 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:32Z","lastTransitionTime":"2025-12-03T19:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.127866 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.127947 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.127980 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.128009 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.128030 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:32Z","lastTransitionTime":"2025-12-03T19:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.231403 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.231500 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.231524 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.231556 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.231632 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:32Z","lastTransitionTime":"2025-12-03T19:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.250984 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.251049 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.251068 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.251092 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.251109 4916 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T19:31:32Z","lastTransitionTime":"2025-12-03T19:31:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.322812 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"]
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.323657 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.326896 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.327084 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.327269 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.328881 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.347665 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=82.347647359 podStartE2EDuration="1m22.347647359s" podCreationTimestamp="2025-12-03 19:30:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:32.347526776 +0000 UTC m=+108.310337052" watchObservedRunningTime="2025-12-03 19:31:32.347647359 +0000 UTC m=+108.310457625"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.373043 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.373147 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.373193 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.373215 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.373234 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.401892 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-kp7gb" podStartSLOduration=88.401873566 podStartE2EDuration="1m28.401873566s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:32.38860889 +0000 UTC m=+108.351419196" watchObservedRunningTime="2025-12-03 19:31:32.401873566 +0000 UTC m=+108.364683852"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.453228 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=58.453209225 podStartE2EDuration="58.453209225s" podCreationTimestamp="2025-12-03 19:30:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:32.433746402 +0000 UTC m=+108.396556718" watchObservedRunningTime="2025-12-03 19:31:32.453209225 +0000 UTC m=+108.416019491"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.474386 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.474450 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.474488 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.474531 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.474559 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.474687 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.474765 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.476137 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.477435 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 19:31:32 crc kubenswrapper[4916]: E1203 19:31:32.477538 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.477751 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 19:31:32 crc kubenswrapper[4916]: E1203 19:31:32.477802 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.477906 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 19:31:32 crc kubenswrapper[4916]: E1203 19:31:32.477964 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.478065 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw"
Dec 03 19:31:32 crc kubenswrapper[4916]: E1203 19:31:32.478119 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461"
pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.496249 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g" Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.499543 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podStartSLOduration=88.499520689 podStartE2EDuration="1m28.499520689s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:32.481280519 +0000 UTC m=+108.444090845" watchObservedRunningTime="2025-12-03 19:31:32.499520689 +0000 UTC m=+108.462330965" Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.512534 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/45bd9ae3-8a5d-4c0d-a392-7b549bc788a9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-64f2g\" (UID: \"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g" Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.517699 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=29.517679167 podStartE2EDuration="29.517679167s" podCreationTimestamp="2025-12-03 19:31:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:32.499149639 +0000 UTC m=+108.461959905" watchObservedRunningTime="2025-12-03 19:31:32.517679167 +0000 UTC m=+108.480489443" Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.528071 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-fcbx4" podStartSLOduration=88.528053425 podStartE2EDuration="1m28.528053425s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:32.527677485 +0000 UTC m=+108.490487751" watchObservedRunningTime="2025-12-03 19:31:32.528053425 +0000 UTC m=+108.490863701" Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.589147 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-tpt4n" podStartSLOduration=88.589126716 podStartE2EDuration="1m28.589126716s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:32.563802066 +0000 UTC m=+108.526612342" watchObservedRunningTime="2025-12-03 19:31:32.589126716 +0000 UTC m=+108.551936992" Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.589273 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=85.58926703 podStartE2EDuration="1m25.58926703s" podCreationTimestamp="2025-12-03 19:30:07 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:32.587378459 +0000 UTC m=+108.550188735" watchObservedRunningTime="2025-12-03 19:31:32.58926703 +0000 UTC m=+108.552077306" Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.615688 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=88.615669229 podStartE2EDuration="1m28.615669229s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:32.603500322 +0000 UTC m=+108.566310598" watchObservedRunningTime="2025-12-03 19:31:32.615669229 +0000 UTC m=+108.578479505" Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.641830 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-4vkgz" podStartSLOduration=88.641807381 podStartE2EDuration="1m28.641807381s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:32.641645566 +0000 UTC m=+108.604455863" watchObservedRunningTime="2025-12-03 19:31:32.641807381 +0000 UTC m=+108.604617647" Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.645724 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g" Dec 03 19:31:32 crc kubenswrapper[4916]: I1203 19:31:32.656976 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9mxpz" podStartSLOduration=88.656954978 podStartE2EDuration="1m28.656954978s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:32.65554654 +0000 UTC m=+108.618356836" watchObservedRunningTime="2025-12-03 19:31:32.656954978 +0000 UTC m=+108.619765254" Dec 03 19:31:33 crc kubenswrapper[4916]: I1203 19:31:33.345758 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g" event={"ID":"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9","Type":"ContainerStarted","Data":"31bf2ef9f3c58a6235df83ec6324b2e77a17dd0fbbc7dad95a0e9c36a21e61f8"} Dec 03 19:31:33 crc kubenswrapper[4916]: I1203 19:31:33.345847 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g" event={"ID":"45bd9ae3-8a5d-4c0d-a392-7b549bc788a9","Type":"ContainerStarted","Data":"0b443a6c80bcad4fa9d8759ad8f7895d76942773a981b09fd95fd4fc01e3ada1"} Dec 03 19:31:33 crc kubenswrapper[4916]: I1203 19:31:33.362089 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-64f2g" podStartSLOduration=89.362062597 podStartE2EDuration="1m29.362062597s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:33.361160243 +0000 UTC m=+109.323970509" watchObservedRunningTime="2025-12-03 19:31:33.362062597 +0000 UTC m=+109.324872903" Dec 03 19:31:34 crc kubenswrapper[4916]: I1203 
19:31:34.477752 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:34 crc kubenswrapper[4916]: I1203 19:31:34.477832 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:34 crc kubenswrapper[4916]: I1203 19:31:34.477770 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:34 crc kubenswrapper[4916]: I1203 19:31:34.477749 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:34 crc kubenswrapper[4916]: E1203 19:31:34.478652 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:34 crc kubenswrapper[4916]: E1203 19:31:34.478881 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:34 crc kubenswrapper[4916]: E1203 19:31:34.478923 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:34 crc kubenswrapper[4916]: E1203 19:31:34.479007 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:36 crc kubenswrapper[4916]: I1203 19:31:36.477334 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:36 crc kubenswrapper[4916]: I1203 19:31:36.477456 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:36 crc kubenswrapper[4916]: E1203 19:31:36.477488 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:36 crc kubenswrapper[4916]: I1203 19:31:36.477537 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:36 crc kubenswrapper[4916]: I1203 19:31:36.477556 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:36 crc kubenswrapper[4916]: E1203 19:31:36.477763 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:36 crc kubenswrapper[4916]: E1203 19:31:36.477823 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:36 crc kubenswrapper[4916]: E1203 19:31:36.477975 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:38 crc kubenswrapper[4916]: I1203 19:31:38.371886 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4vkgz_d75c407a-2bbd-4cc3-bc0e-b1010aeeab57/kube-multus/1.log" Dec 03 19:31:38 crc kubenswrapper[4916]: I1203 19:31:38.372850 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4vkgz_d75c407a-2bbd-4cc3-bc0e-b1010aeeab57/kube-multus/0.log" Dec 03 19:31:38 crc kubenswrapper[4916]: I1203 19:31:38.372943 4916 generic.go:334] "Generic (PLEG): container finished" podID="d75c407a-2bbd-4cc3-bc0e-b1010aeeab57" containerID="37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5" exitCode=1 Dec 03 19:31:38 crc kubenswrapper[4916]: I1203 19:31:38.373010 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4vkgz" event={"ID":"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57","Type":"ContainerDied","Data":"37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5"} Dec 03 19:31:38 crc kubenswrapper[4916]: I1203 19:31:38.373120 4916 scope.go:117] "RemoveContainer" containerID="18f45847231ecccf5df14aa02e82a27339d0f809df9a42f5f7008547a6f19969" Dec 03 19:31:38 crc kubenswrapper[4916]: I1203 19:31:38.373782 4916 scope.go:117] "RemoveContainer" containerID="37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5" Dec 03 19:31:38 crc kubenswrapper[4916]: E1203 19:31:38.374137 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-4vkgz_openshift-multus(d75c407a-2bbd-4cc3-bc0e-b1010aeeab57)\"" pod="openshift-multus/multus-4vkgz" podUID="d75c407a-2bbd-4cc3-bc0e-b1010aeeab57" Dec 03 19:31:38 crc kubenswrapper[4916]: I1203 19:31:38.477807 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:38 crc kubenswrapper[4916]: I1203 19:31:38.477880 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:38 crc kubenswrapper[4916]: I1203 19:31:38.477899 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:38 crc kubenswrapper[4916]: E1203 19:31:38.477995 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:38 crc kubenswrapper[4916]: I1203 19:31:38.478023 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:38 crc kubenswrapper[4916]: E1203 19:31:38.478120 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:38 crc kubenswrapper[4916]: E1203 19:31:38.478238 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:38 crc kubenswrapper[4916]: E1203 19:31:38.478321 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:39 crc kubenswrapper[4916]: I1203 19:31:39.378795 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4vkgz_d75c407a-2bbd-4cc3-bc0e-b1010aeeab57/kube-multus/1.log" Dec 03 19:31:39 crc kubenswrapper[4916]: I1203 19:31:39.478468 4916 scope.go:117] "RemoveContainer" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" Dec 03 19:31:39 crc kubenswrapper[4916]: E1203 19:31:39.478756 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c9jfr_openshift-ovn-kubernetes(990ba077-9bb2-4ab0-b098-c4c6fd6f4f18)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" Dec 03 19:31:40 crc kubenswrapper[4916]: I1203 19:31:40.478086 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:40 crc kubenswrapper[4916]: I1203 19:31:40.478200 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:40 crc kubenswrapper[4916]: E1203 19:31:40.478272 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:40 crc kubenswrapper[4916]: I1203 19:31:40.478296 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:40 crc kubenswrapper[4916]: I1203 19:31:40.478344 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:40 crc kubenswrapper[4916]: E1203 19:31:40.478462 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:40 crc kubenswrapper[4916]: E1203 19:31:40.478521 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:40 crc kubenswrapper[4916]: E1203 19:31:40.478637 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:42 crc kubenswrapper[4916]: I1203 19:31:42.477226 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:42 crc kubenswrapper[4916]: I1203 19:31:42.477316 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:42 crc kubenswrapper[4916]: E1203 19:31:42.477816 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:42 crc kubenswrapper[4916]: I1203 19:31:42.477377 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:42 crc kubenswrapper[4916]: I1203 19:31:42.477322 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:42 crc kubenswrapper[4916]: E1203 19:31:42.477921 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:42 crc kubenswrapper[4916]: E1203 19:31:42.477994 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:42 crc kubenswrapper[4916]: E1203 19:31:42.478071 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:44 crc kubenswrapper[4916]: I1203 19:31:44.477307 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:44 crc kubenswrapper[4916]: I1203 19:31:44.477402 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:44 crc kubenswrapper[4916]: E1203 19:31:44.480273 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:44 crc kubenswrapper[4916]: I1203 19:31:44.480388 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:44 crc kubenswrapper[4916]: E1203 19:31:44.480448 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:44 crc kubenswrapper[4916]: I1203 19:31:44.480501 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:44 crc kubenswrapper[4916]: E1203 19:31:44.480610 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:44 crc kubenswrapper[4916]: E1203 19:31:44.480655 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:44 crc kubenswrapper[4916]: E1203 19:31:44.519732 4916 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 03 19:31:44 crc kubenswrapper[4916]: E1203 19:31:44.588645 4916 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 19:31:46 crc kubenswrapper[4916]: I1203 19:31:46.477129 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:46 crc kubenswrapper[4916]: I1203 19:31:46.477176 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:46 crc kubenswrapper[4916]: E1203 19:31:46.477361 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:46 crc kubenswrapper[4916]: I1203 19:31:46.477393 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:46 crc kubenswrapper[4916]: I1203 19:31:46.477412 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:46 crc kubenswrapper[4916]: E1203 19:31:46.477456 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:46 crc kubenswrapper[4916]: E1203 19:31:46.477625 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:46 crc kubenswrapper[4916]: E1203 19:31:46.477743 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:48 crc kubenswrapper[4916]: I1203 19:31:48.477385 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:48 crc kubenswrapper[4916]: I1203 19:31:48.477495 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:48 crc kubenswrapper[4916]: I1203 19:31:48.477511 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:48 crc kubenswrapper[4916]: I1203 19:31:48.477612 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:48 crc kubenswrapper[4916]: E1203 19:31:48.477737 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:48 crc kubenswrapper[4916]: E1203 19:31:48.477834 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:48 crc kubenswrapper[4916]: E1203 19:31:48.477998 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:48 crc kubenswrapper[4916]: E1203 19:31:48.478182 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:49 crc kubenswrapper[4916]: E1203 19:31:49.590428 4916 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 19:31:50 crc kubenswrapper[4916]: I1203 19:31:50.477474 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:50 crc kubenswrapper[4916]: I1203 19:31:50.477704 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:50 crc kubenswrapper[4916]: I1203 19:31:50.477847 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:50 crc kubenswrapper[4916]: I1203 19:31:50.477915 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:50 crc kubenswrapper[4916]: E1203 19:31:50.478299 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:50 crc kubenswrapper[4916]: E1203 19:31:50.478486 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:50 crc kubenswrapper[4916]: E1203 19:31:50.478675 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:50 crc kubenswrapper[4916]: E1203 19:31:50.479021 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:50 crc kubenswrapper[4916]: I1203 19:31:50.479320 4916 scope.go:117] "RemoveContainer" containerID="37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5" Dec 03 19:31:51 crc kubenswrapper[4916]: I1203 19:31:51.425417 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4vkgz_d75c407a-2bbd-4cc3-bc0e-b1010aeeab57/kube-multus/1.log" Dec 03 19:31:51 crc kubenswrapper[4916]: I1203 19:31:51.425770 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4vkgz" event={"ID":"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57","Type":"ContainerStarted","Data":"89f2c4f684ccae25dd297700bc3c44bbf0021dc479bb07ef77e0d6ba48e131fb"} Dec 03 19:31:52 crc kubenswrapper[4916]: I1203 19:31:52.478086 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:52 crc kubenswrapper[4916]: I1203 19:31:52.478132 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:52 crc kubenswrapper[4916]: I1203 19:31:52.478185 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:52 crc kubenswrapper[4916]: I1203 19:31:52.478113 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:52 crc kubenswrapper[4916]: E1203 19:31:52.478316 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:52 crc kubenswrapper[4916]: E1203 19:31:52.478402 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:52 crc kubenswrapper[4916]: E1203 19:31:52.478471 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:52 crc kubenswrapper[4916]: E1203 19:31:52.478483 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:53 crc kubenswrapper[4916]: I1203 19:31:53.478755 4916 scope.go:117] "RemoveContainer" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" Dec 03 19:31:54 crc kubenswrapper[4916]: I1203 19:31:54.444129 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-kbxgw"] Dec 03 19:31:54 crc kubenswrapper[4916]: I1203 19:31:54.444254 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:54 crc kubenswrapper[4916]: E1203 19:31:54.444346 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:54 crc kubenswrapper[4916]: I1203 19:31:54.454912 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/3.log" Dec 03 19:31:54 crc kubenswrapper[4916]: I1203 19:31:54.458504 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerStarted","Data":"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4"} Dec 03 19:31:54 crc kubenswrapper[4916]: I1203 19:31:54.459699 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:31:54 crc kubenswrapper[4916]: I1203 19:31:54.477234 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:54 crc kubenswrapper[4916]: I1203 19:31:54.477352 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:54 crc kubenswrapper[4916]: I1203 19:31:54.477552 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:54 crc kubenswrapper[4916]: E1203 19:31:54.477555 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:54 crc kubenswrapper[4916]: E1203 19:31:54.477640 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:54 crc kubenswrapper[4916]: E1203 19:31:54.477382 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:54 crc kubenswrapper[4916]: I1203 19:31:54.496825 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podStartSLOduration=110.496797042 podStartE2EDuration="1m50.496797042s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:31:54.493968446 +0000 UTC m=+130.456778712" watchObservedRunningTime="2025-12-03 19:31:54.496797042 +0000 UTC m=+130.459607298" Dec 03 19:31:54 crc kubenswrapper[4916]: E1203 19:31:54.591349 4916 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 19:31:56 crc kubenswrapper[4916]: I1203 19:31:56.477775 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:56 crc kubenswrapper[4916]: E1203 19:31:56.478229 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:56 crc kubenswrapper[4916]: I1203 19:31:56.477884 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:56 crc kubenswrapper[4916]: E1203 19:31:56.478335 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:56 crc kubenswrapper[4916]: I1203 19:31:56.477909 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:56 crc kubenswrapper[4916]: E1203 19:31:56.478522 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:31:56 crc kubenswrapper[4916]: I1203 19:31:56.477852 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:56 crc kubenswrapper[4916]: E1203 19:31:56.478975 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:58 crc kubenswrapper[4916]: I1203 19:31:58.478865 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:31:58 crc kubenswrapper[4916]: E1203 19:31:58.479010 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 19:31:58 crc kubenswrapper[4916]: I1203 19:31:58.479060 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:31:58 crc kubenswrapper[4916]: E1203 19:31:58.479203 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 19:31:58 crc kubenswrapper[4916]: I1203 19:31:58.479217 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:31:58 crc kubenswrapper[4916]: E1203 19:31:58.479326 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 19:31:58 crc kubenswrapper[4916]: I1203 19:31:58.479214 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:31:58 crc kubenswrapper[4916]: E1203 19:31:58.479448 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-kbxgw" podUID="9ae5584e-d1d9-4aa9-955a-41bdf15f0461" Dec 03 19:32:00 crc kubenswrapper[4916]: I1203 19:32:00.477740 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:32:00 crc kubenswrapper[4916]: I1203 19:32:00.477798 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:32:00 crc kubenswrapper[4916]: I1203 19:32:00.477763 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:32:00 crc kubenswrapper[4916]: I1203 19:32:00.477883 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:32:00 crc kubenswrapper[4916]: I1203 19:32:00.480914 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 19:32:00 crc kubenswrapper[4916]: I1203 19:32:00.480989 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 19:32:00 crc kubenswrapper[4916]: I1203 19:32:00.482830 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 19:32:00 crc kubenswrapper[4916]: I1203 19:32:00.483459 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 19:32:00 crc kubenswrapper[4916]: I1203 19:32:00.483689 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 19:32:00 crc kubenswrapper[4916]: I1203 19:32:00.483844 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.118842 4916 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.173590 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6ngfp"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.174320 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.174998 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5w58b"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.175712 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.176099 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-87mhb"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.176915 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.177050 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.177604 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.191893 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.192335 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 19:32:03 crc kubenswrapper[4916]: W1203 19:32:03.192711 4916 reflector.go:561] object-"openshift-apiserver"/"etcd-serving-ca": failed to list *v1.ConfigMap: configmaps "etcd-serving-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Dec 03 19:32:03 crc kubenswrapper[4916]: E1203 19:32:03.192759 4916 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"etcd-serving-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"etcd-serving-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 19:32:03 crc kubenswrapper[4916]: W1203 19:32:03.194982 4916 reflector.go:561] object-"openshift-apiserver"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Dec 03 19:32:03 crc kubenswrapper[4916]: E1203 19:32:03.195027 4916 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.195159 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.195526 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.195724 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.196358 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.199533 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.199759 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.199921 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.200153 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.200301 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 19:32:03 crc kubenswrapper[4916]: W1203 19:32:03.200448 4916 reflector.go:561] object-"openshift-apiserver"/"etcd-client": failed to list *v1.Secret: secrets "etcd-client" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver": no relationship found between node 'crc' and this object Dec 03 19:32:03 crc kubenswrapper[4916]: E1203 19:32:03.200476 4916 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver\"/\"etcd-client\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"etcd-client\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.200921 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.201288 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.201599 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.201801 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.202032 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.202252 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.202517 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.202743 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.202967 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.203204 4916 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"audit-1" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.203393 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.206192 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.206650 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.206911 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.207134 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.207986 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.211778 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.211816 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.211893 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.212167 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.212229 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.212404 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.212540 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.212774 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.212841 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.213221 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tzrmv"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.213408 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.213491 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.213709 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9xdsf"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.214139 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.214644 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.214863 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.216629 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.216693 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.217297 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.351273 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.351294 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.356759 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.361519 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.361860 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.361925 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.362732 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.362994 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.363487 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.363632 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.363867 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.365244 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.365745 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-cdztl"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.366216 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.367423 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.367861 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.368596 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.368825 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.368920 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.368979 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.369129 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.369441 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.369947 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.370268 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.370609 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.370882 4916 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.371038 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.371325 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.371633 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.373641 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.375441 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.375693 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-lcv2z"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.376418 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-dsf48"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.376860 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.377358 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.377825 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.378032 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-dsf48" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.380130 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.380281 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.380374 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.380464 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.380610 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.381136 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.381956 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.382155 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5w58b"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.387536 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-n4ps5"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.389594 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.390522 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.391079 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.391589 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6ngfp"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.391763 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.391799 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.391930 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.391965 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.392058 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.392119 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.392265 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.392418 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.393117 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tzrmv"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.395017 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.396699 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.397015 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-lcv2z"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.400210 4916 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.400759 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.400936 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.409419 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.409771 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.410025 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.410310 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.410528 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.410535 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.431823 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9xdsf"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.435025 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.435605 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.438346 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.438662 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.438991 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.439337 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.441615 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.442052 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.455711 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 
19:32:03.455889 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.457216 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.457342 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.457787 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.457988 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.458191 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.458832 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.459004 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.461675 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-87mhb"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.461786 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dsf48"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.465101 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.465619 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-n4ps5"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.466240 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.466415 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.467494 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.467993 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-config\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468016 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04ea5305-dfae-4423-8732-a5edbee97000-service-ca-bundle\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: 
\"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468036 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cf10b43c-dac3-462c-b4d5-66b27b895743-metrics-tls\") pod \"dns-operator-744455d44c-n4ps5\" (UID: \"cf10b43c-dac3-462c-b4d5-66b27b895743\") " pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468053 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-gg2sb\" (UID: \"61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468069 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-node-pullsecrets\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468084 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-etcd-client\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468098 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-trusted-ca-bundle\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468114 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d60ab555-3c24-40c0-917b-3bed070c6ec5-serving-cert\") pod \"openshift-config-operator-7777fb866f-rvjw7\" (UID: \"d60ab555-3c24-40c0-917b-3bed070c6ec5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468128 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1710f9e2-c924-4fe6-b405-9aab5e81795f-config\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468147 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468163 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsdnf\" (UniqueName: \"kubernetes.io/projected/1d18bd86-a58f-451c-90c0-9fa9834c6d77-kube-api-access-nsdnf\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468178 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/891743f6-cf2a-499d-914c-003f9a0a6875-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: \"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468197 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsj5h\" (UniqueName: \"kubernetes.io/projected/8b9af469-c457-4818-a486-26fc2ca77b9a-kube-api-access-vsj5h\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468213 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-serving-cert\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468233 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4c7q\" (UniqueName: \"kubernetes.io/projected/dbed5156-bd14-449e-943a-488606ac49e2-kube-api-access-z4c7q\") pod \"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468249 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c2g4\" (UniqueName: \"kubernetes.io/projected/61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26-kube-api-access-7c2g4\") pod \"cluster-samples-operator-665b6dd947-gg2sb\" (UID: \"61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468265 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pp4gs\" (UniqueName: \"kubernetes.io/projected/f8ba73c5-9167-4354-b1d2-896a40e52e1c-kube-api-access-pp4gs\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468281 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04ea5305-dfae-4423-8732-a5edbee97000-trusted-ca-bundle\") pod 
\"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468298 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468315 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-service-ca\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468334 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468350 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdjj6\" (UniqueName: \"kubernetes.io/projected/b2c489cf-d96d-42fe-83df-7447ad03cf43-kube-api-access-pdjj6\") pod \"openshift-apiserver-operator-796bbdcf4f-mc7qr\" (UID: \"b2c489cf-d96d-42fe-83df-7447ad03cf43\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468370 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9932bc0-7bd5-4054-9eb7-6f3e0849e422-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4fbcm\" (UID: \"f9932bc0-7bd5-4054-9eb7-6f3e0849e422\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468401 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtrxt\" (UniqueName: \"kubernetes.io/projected/28b8555a-3084-43be-9d3b-18fd0d993a0d-kube-api-access-mtrxt\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468465 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-audit-dir\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468537 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-client\") pod 
\"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468592 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-client-ca\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468619 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdw7m\" (UniqueName: \"kubernetes.io/projected/04ea5305-dfae-4423-8732-a5edbee97000-kube-api-access-vdw7m\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468639 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-config\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468659 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468683 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468703 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8ba73c5-9167-4354-b1d2-896a40e52e1c-serving-cert\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468722 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drqpp\" (UniqueName: \"kubernetes.io/projected/d60ab555-3c24-40c0-917b-3bed070c6ec5-kube-api-access-drqpp\") pod \"openshift-config-operator-7777fb866f-rvjw7\" (UID: \"d60ab555-3c24-40c0-917b-3bed070c6ec5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468765 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-serving-cert\") pod 
\"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468787 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2mpf\" (UniqueName: \"kubernetes.io/projected/d83669b5-21c4-48ad-99f9-5abccbf369a3-kube-api-access-b2mpf\") pod \"downloads-7954f5f757-dsf48\" (UID: \"d83669b5-21c4-48ad-99f9-5abccbf369a3\") " pod="openshift-console/downloads-7954f5f757-dsf48" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468855 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kldfn\" (UniqueName: \"kubernetes.io/projected/1710f9e2-c924-4fe6-b405-9aab5e81795f-kube-api-access-kldfn\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468898 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04ea5305-dfae-4423-8732-a5edbee97000-serving-cert\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468951 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-audit-policies\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.468971 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-policies\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469010 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbed5156-bd14-449e-943a-488606ac49e2-config\") pod \"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469039 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469074 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-encryption-config\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469097 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-client-ca\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469115 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/28b8555a-3084-43be-9d3b-18fd0d993a0d-serving-cert\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469165 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469204 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-trusted-ca-bundle\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469207 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469224 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmp7t\" (UniqueName: \"kubernetes.io/projected/f9932bc0-7bd5-4054-9eb7-6f3e0849e422-kube-api-access-dmp7t\") pod \"openshift-controller-manager-operator-756b6f6bc6-4fbcm\" (UID: \"f9932bc0-7bd5-4054-9eb7-6f3e0849e422\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469241 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4cdh\" (UniqueName: \"kubernetes.io/projected/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-kube-api-access-c4cdh\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469259 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1710f9e2-c924-4fe6-b405-9aab5e81795f-auth-proxy-config\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469277 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469293 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-config\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469311 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/28b8555a-3084-43be-9d3b-18fd0d993a0d-trusted-ca\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469331 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-config\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469348 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2c489cf-d96d-42fe-83df-7447ad03cf43-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mc7qr\" (UID: \"b2c489cf-d96d-42fe-83df-7447ad03cf43\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469366 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469385 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpckw\" (UniqueName: \"kubernetes.io/projected/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-kube-api-access-fpckw\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469402 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/dbed5156-bd14-449e-943a-488606ac49e2-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469419 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-audit-dir\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469437 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-image-import-ca\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469455 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2c489cf-d96d-42fe-83df-7447ad03cf43-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mc7qr\" (UID: \"b2c489cf-d96d-42fe-83df-7447ad03cf43\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469470 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28b8555a-3084-43be-9d3b-18fd0d993a0d-config\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469489 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv42w\" (UniqueName: \"kubernetes.io/projected/891743f6-cf2a-499d-914c-003f9a0a6875-kube-api-access-vv42w\") pod \"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: \"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469510 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b9af469-c457-4818-a486-26fc2ca77b9a-serving-cert\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469602 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s7gk\" (UniqueName: \"kubernetes.io/projected/cf10b43c-dac3-462c-b4d5-66b27b895743-kube-api-access-6s7gk\") pod \"dns-operator-744455d44c-n4ps5\" (UID: \"cf10b43c-dac3-462c-b4d5-66b27b895743\") " pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469626 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-audit\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469643 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/891743f6-cf2a-499d-914c-003f9a0a6875-bound-sa-token\") pod 
\"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: \"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469671 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469697 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-encryption-config\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469726 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9932bc0-7bd5-4054-9eb7-6f3e0849e422-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4fbcm\" (UID: \"f9932bc0-7bd5-4054-9eb7-6f3e0849e422\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469744 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-oauth-serving-cert\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469772 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469800 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/891743f6-cf2a-499d-914c-003f9a0a6875-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: \"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469832 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469862 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-oauth-config\") 
pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469901 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-serving-cert\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469927 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04ea5305-dfae-4423-8732-a5edbee97000-config\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.469949 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-serving-ca\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.470029 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.470069 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.470192 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.470199 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/dbed5156-bd14-449e-943a-488606ac49e2-images\") pod \"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.470258 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdqzw\" (UniqueName: \"kubernetes.io/projected/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-kube-api-access-xdqzw\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.470288 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-dir\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.470334 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1710f9e2-c924-4fe6-b405-9aab5e81795f-machine-approver-tls\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.470364 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d60ab555-3c24-40c0-917b-3bed070c6ec5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-rvjw7\" (UID: \"d60ab555-3c24-40c0-917b-3bed070c6ec5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.471637 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.472198 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.473061 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-cdztl"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.474872 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwgl6"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.475578 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.476360 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.477009 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.477157 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-d2kts"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.477685 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-d2kts" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.478515 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-ckbgp"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.479841 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.480364 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.480519 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-ckbgp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.480863 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.480874 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.481507 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.481589 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.481931 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.482384 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-xltwc"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.483011 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.486033 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.486556 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.488324 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.489337 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.490126 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.490650 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.492677 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.493264 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.493288 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-5vmfh"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.494230 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5vmfh" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.496338 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.497100 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.498051 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.499021 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.499660 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.500233 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-lk8q6"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.501087 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.501339 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.505704 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rs2tx"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.506050 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.509230 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.516589 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.517440 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.517839 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.518709 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.519584 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.520146 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w86mh"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.520732 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.521579 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6rv69"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.522594 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-6rv69" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.523465 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-n59pd"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.523954 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.524414 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.525099 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.525661 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.527105 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.528875 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.530470 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwgl6"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.531650 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.532753 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.534071 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.536167 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-lk8q6"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.536464 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.538406 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-service-ca/service-ca-9c57cc56f-rs2tx"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.539529 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-n59pd"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.540868 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-xltwc"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.542814 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.544269 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.545788 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.547111 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w86mh"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.548699 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-rzbqz"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.549650 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-rzbqz" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.550186 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5vmfh"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.551831 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ckbgp"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.553254 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.554685 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.556779 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.557426 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.558680 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.560301 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.561832 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6rv69"] Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571257 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571379 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571458 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdqzw\" (UniqueName: \"kubernetes.io/projected/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-kube-api-access-xdqzw\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571537 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d60ab555-3c24-40c0-917b-3bed070c6ec5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-rvjw7\" (UID: \"d60ab555-3c24-40c0-917b-3bed070c6ec5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571635 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-dir\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571699 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-dir\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571705 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-config\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571772 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d3a49aea-7afb-4578-9717-58559d47a1fe-service-ca-bundle\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571801 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-node-pullsecrets\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " 
pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571825 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-registration-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571851 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-gg2sb\" (UID: \"61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571879 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-etcd-client\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571901 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-node-pullsecrets\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571904 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/815db339-95a6-41d8-8572-9fab3b7c2030-config\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571972 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsdnf\" (UniqueName: \"kubernetes.io/projected/1d18bd86-a58f-451c-90c0-9fa9834c6d77-kube-api-access-nsdnf\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.571997 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d60ab555-3c24-40c0-917b-3bed070c6ec5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-rvjw7\" (UID: \"d60ab555-3c24-40c0-917b-3bed070c6ec5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572006 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/891743f6-cf2a-499d-914c-003f9a0a6875-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: \"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572115 4916 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v562m\" (UniqueName: \"kubernetes.io/projected/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-kube-api-access-v562m\") pod \"marketplace-operator-79b997595-w86mh\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") " pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572163 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4c7q\" (UniqueName: \"kubernetes.io/projected/dbed5156-bd14-449e-943a-488606ac49e2-kube-api-access-z4c7q\") pod \"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572195 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rjbc\" (UniqueName: \"kubernetes.io/projected/1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f-kube-api-access-2rjbc\") pod \"ingress-canary-5vmfh\" (UID: \"1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f\") " pod="openshift-ingress-canary/ingress-canary-5vmfh" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572226 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pp4gs\" (UniqueName: \"kubernetes.io/projected/f8ba73c5-9167-4354-b1d2-896a40e52e1c-kube-api-access-pp4gs\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572253 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04ea5305-dfae-4423-8732-a5edbee97000-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572284 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c2g4\" (UniqueName: \"kubernetes.io/projected/61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26-kube-api-access-7c2g4\") pod \"cluster-samples-operator-665b6dd947-gg2sb\" (UID: \"61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572314 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdjj6\" (UniqueName: \"kubernetes.io/projected/b2c489cf-d96d-42fe-83df-7447ad03cf43-kube-api-access-pdjj6\") pod \"openshift-apiserver-operator-796bbdcf4f-mc7qr\" (UID: \"b2c489cf-d96d-42fe-83df-7447ad03cf43\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572344 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/630defa6-81c8-4d9f-84bf-ef45e55be900-config-volume\") pod \"dns-default-ckbgp\" (UID: \"630defa6-81c8-4d9f-84bf-ef45e55be900\") " pod="openshift-dns/dns-default-ckbgp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572396 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/f9932bc0-7bd5-4054-9eb7-6f3e0849e422-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4fbcm\" (UID: \"f9932bc0-7bd5-4054-9eb7-6f3e0849e422\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572426 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc2179db-f671-4331-b3ab-283c0fe68953-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-p6msx\" (UID: \"bc2179db-f671-4331-b3ab-283c0fe68953\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572459 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-audit-dir\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572497 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d3a49aea-7afb-4578-9717-58559d47a1fe-default-certificate\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572550 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-client\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572598 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4c7cc56e-1da2-4fa4-a402-8b2d407caf39-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-gxng9\" (UID: \"4c7cc56e-1da2-4fa4-a402-8b2d407caf39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572641 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-client-ca\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572679 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdw7m\" (UniqueName: \"kubernetes.io/projected/04ea5305-dfae-4423-8732-a5edbee97000-kube-api-access-vdw7m\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572718 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572821 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2mpf\" (UniqueName: \"kubernetes.io/projected/d83669b5-21c4-48ad-99f9-5abccbf369a3-kube-api-access-b2mpf\") pod \"downloads-7954f5f757-dsf48\" (UID: \"d83669b5-21c4-48ad-99f9-5abccbf369a3\") " pod="openshift-console/downloads-7954f5f757-dsf48" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.572972 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ce6a756-7c72-45f6-abb8-96d9597b7429-config-volume\") pod \"collect-profiles-29413170-n5jt8\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.573034 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04ea5305-dfae-4423-8732-a5edbee97000-serving-cert\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.573068 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kldfn\" (UniqueName: \"kubernetes.io/projected/1710f9e2-c924-4fe6-b405-9aab5e81795f-kube-api-access-kldfn\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.573109 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-csi-data-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.573187 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc2179db-f671-4331-b3ab-283c0fe68953-config\") pod \"kube-controller-manager-operator-78b949d7b-p6msx\" (UID: \"bc2179db-f671-4331-b3ab-283c0fe68953\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.573199 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9932bc0-7bd5-4054-9eb7-6f3e0849e422-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4fbcm\" (UID: \"f9932bc0-7bd5-4054-9eb7-6f3e0849e422\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.573227 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/815db339-95a6-41d8-8572-9fab3b7c2030-serving-cert\") pod 
\"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.573273 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tknjj\" (UniqueName: \"kubernetes.io/projected/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-kube-api-access-tknjj\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.573506 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mwkd\" (UniqueName: \"kubernetes.io/projected/8d00895f-3847-4601-8fe5-41f8fd32a47a-kube-api-access-7mwkd\") pod \"catalog-operator-68c6474976-bg7xx\" (UID: \"8d00895f-3847-4601-8fe5-41f8fd32a47a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.573537 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-audit-dir\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.573547 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ce6a756-7c72-45f6-abb8-96d9597b7429-secret-volume\") pod \"collect-profiles-29413170-n5jt8\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574022 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574327 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-client-ca\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574469 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcznk\" (UniqueName: \"kubernetes.io/projected/d3a49aea-7afb-4578-9717-58559d47a1fe-kube-api-access-pcznk\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574577 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bznwc\" (UniqueName: \"kubernetes.io/projected/815db339-95a6-41d8-8572-9fab3b7c2030-kube-api-access-bznwc\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574622 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/28b8555a-3084-43be-9d3b-18fd0d993a0d-serving-cert\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574651 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/75435454-50d3-4560-821e-a5b8c171652b-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7rwpc\" (UID: \"75435454-50d3-4560-821e-a5b8c171652b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574680 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-trusted-ca-bundle\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574705 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmp7t\" (UniqueName: \"kubernetes.io/projected/f9932bc0-7bd5-4054-9eb7-6f3e0849e422-kube-api-access-dmp7t\") pod \"openshift-controller-manager-operator-756b6f6bc6-4fbcm\" (UID: \"f9932bc0-7bd5-4054-9eb7-6f3e0849e422\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574710 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04ea5305-dfae-4423-8732-a5edbee97000-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574729 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/28b8555a-3084-43be-9d3b-18fd0d993a0d-trusted-ca\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574758 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1710f9e2-c924-4fe6-b405-9aab5e81795f-auth-proxy-config\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574787 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574813 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-config\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574860 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpckw\" (UniqueName: \"kubernetes.io/projected/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-kube-api-access-fpckw\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574939 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x44xm\" (UniqueName: \"kubernetes.io/projected/0dba9d00-8b2e-4271-a75c-16ceac76a6de-kube-api-access-x44xm\") pod \"kube-storage-version-migrator-operator-b67b599dd-v6slm\" (UID: \"0dba9d00-8b2e-4271-a75c-16ceac76a6de\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.574976 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f-cert\") pod \"ingress-canary-5vmfh\" (UID: \"1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f\") " pod="openshift-ingress-canary/ingress-canary-5vmfh" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575018 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/815db339-95a6-41d8-8572-9fab3b7c2030-etcd-client\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575069 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-audit-dir\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575100 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-config\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575127 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575151 4916 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28b8555a-3084-43be-9d3b-18fd0d993a0d-config\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575177 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv42w\" (UniqueName: \"kubernetes.io/projected/891743f6-cf2a-499d-914c-003f9a0a6875-kube-api-access-vv42w\") pod \"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: \"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575215 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s7gk\" (UniqueName: \"kubernetes.io/projected/cf10b43c-dac3-462c-b4d5-66b27b895743-kube-api-access-6s7gk\") pod \"dns-operator-744455d44c-n4ps5\" (UID: \"cf10b43c-dac3-462c-b4d5-66b27b895743\") " pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575251 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsvhh\" (UniqueName: \"kubernetes.io/projected/866b0506-2939-4cae-936e-a21d5040cb3f-kube-api-access-vsvhh\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575324 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575367 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-audit\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575473 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/891743f6-cf2a-499d-914c-003f9a0a6875-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: \"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575556 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-encryption-config\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575716 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/891743f6-cf2a-499d-914c-003f9a0a6875-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: 
\"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575749 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc84c\" (UniqueName: \"kubernetes.io/projected/6ecd5613-956e-4d47-beba-d572b9415562-kube-api-access-nc84c\") pod \"service-ca-operator-777779d784-n59pd\" (UID: \"6ecd5613-956e-4d47-beba-d572b9415562\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575776 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575800 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-apiservice-cert\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575822 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-oauth-config\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575845 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d3a49aea-7afb-4578-9717-58559d47a1fe-stats-auth\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575906 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04ea5305-dfae-4423-8732-a5edbee97000-config\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575932 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-serving-ca\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575959 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/dbed5156-bd14-449e-943a-488606ac49e2-images\") pod \"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.575985 4916 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0dba9d00-8b2e-4271-a75c-16ceac76a6de-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-v6slm\" (UID: \"0dba9d00-8b2e-4271-a75c-16ceac76a6de\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576052 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-webhook-cert\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576080 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1710f9e2-c924-4fe6-b405-9aab5e81795f-machine-approver-tls\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576135 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndrbs\" (UniqueName: \"kubernetes.io/projected/4c7cc56e-1da2-4fa4-a402-8b2d407caf39-kube-api-access-ndrbs\") pod \"package-server-manager-789f6589d5-gxng9\" (UID: \"4c7cc56e-1da2-4fa4-a402-8b2d407caf39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576162 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cf10b43c-dac3-462c-b4d5-66b27b895743-metrics-tls\") pod \"dns-operator-744455d44c-n4ps5\" (UID: \"cf10b43c-dac3-462c-b4d5-66b27b895743\") " pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576188 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04ea5305-dfae-4423-8732-a5edbee97000-service-ca-bundle\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576224 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ecd5613-956e-4d47-beba-d572b9415562-config\") pod \"service-ca-operator-777779d784-n59pd\" (UID: \"6ecd5613-956e-4d47-beba-d572b9415562\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576339 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-trusted-ca-bundle\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576380 4916 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d60ab555-3c24-40c0-917b-3bed070c6ec5-serving-cert\") pod \"openshift-config-operator-7777fb866f-rvjw7\" (UID: \"d60ab555-3c24-40c0-917b-3bed070c6ec5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576418 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsj5h\" (UniqueName: \"kubernetes.io/projected/8b9af469-c457-4818-a486-26fc2ca77b9a-kube-api-access-vsj5h\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576440 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-serving-cert\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576460 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1710f9e2-c924-4fe6-b405-9aab5e81795f-config\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576481 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576505 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc2179db-f671-4331-b3ab-283c0fe68953-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-p6msx\" (UID: \"bc2179db-f671-4331-b3ab-283c0fe68953\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576581 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576596 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-trusted-ca-bundle\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576642 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-service-ca\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576669 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576694 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8d00895f-3847-4601-8fe5-41f8fd32a47a-profile-collector-cert\") pod \"catalog-operator-68c6474976-bg7xx\" (UID: \"8d00895f-3847-4601-8fe5-41f8fd32a47a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576714 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-tmpfs\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576733 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/815db339-95a6-41d8-8572-9fab3b7c2030-etcd-ca\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576775 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-config\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576796 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-plugins-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576823 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtrxt\" (UniqueName: \"kubernetes.io/projected/28b8555a-3084-43be-9d3b-18fd0d993a0d-kube-api-access-mtrxt\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.576844 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/630defa6-81c8-4d9f-84bf-ef45e55be900-metrics-tls\") pod \"dns-default-ckbgp\" (UID: \"630defa6-81c8-4d9f-84bf-ef45e55be900\") " pod="openshift-dns/dns-default-ckbgp" Dec 03 19:32:03 crc 
kubenswrapper[4916]: I1203 19:32:03.577002 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-config\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577004 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577026 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577071 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-serving-cert\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577102 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plcgq\" (UniqueName: \"kubernetes.io/projected/75435454-50d3-4560-821e-a5b8c171652b-kube-api-access-plcgq\") pod \"control-plane-machine-set-operator-78cbb6b69f-7rwpc\" (UID: \"75435454-50d3-4560-821e-a5b8c171652b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577123 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w86mh\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") " pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577143 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ecd5613-956e-4d47-beba-d572b9415562-serving-cert\") pod \"service-ca-operator-777779d784-n59pd\" (UID: \"6ecd5613-956e-4d47-beba-d572b9415562\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577168 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8ba73c5-9167-4354-b1d2-896a40e52e1c-serving-cert\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577187 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-drqpp\" (UniqueName: \"kubernetes.io/projected/d60ab555-3c24-40c0-917b-3bed070c6ec5-kube-api-access-drqpp\") pod \"openshift-config-operator-7777fb866f-rvjw7\" (UID: \"d60ab555-3c24-40c0-917b-3bed070c6ec5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577205 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-audit-policies\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577221 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-policies\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577237 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d3a49aea-7afb-4578-9717-58559d47a1fe-metrics-certs\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577244 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577271 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-etcd-client\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.578602 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04ea5305-dfae-4423-8732-a5edbee97000-serving-cert\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.578862 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-audit-dir\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.579133 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbed5156-bd14-449e-943a-488606ac49e2-config\") pod \"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 
19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.579207 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-config\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.579218 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.579592 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-audit\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.580400 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04ea5305-dfae-4423-8732-a5edbee97000-config\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.580499 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/28b8555a-3084-43be-9d3b-18fd0d993a0d-serving-cert\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.580585 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/28b8555a-3084-43be-9d3b-18fd0d993a0d-trusted-ca\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.580826 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-service-ca\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.580978 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.581180 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/28b8555a-3084-43be-9d3b-18fd0d993a0d-config\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.581214 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/dbed5156-bd14-449e-943a-488606ac49e2-images\") pod 
\"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.581369 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1710f9e2-c924-4fe6-b405-9aab5e81795f-config\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.577255 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbed5156-bd14-449e-943a-488606ac49e2-config\") pod \"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.581520 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-gg2sb\" (UID: \"61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.581777 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8ba73c5-9167-4354-b1d2-896a40e52e1c-serving-cert\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.582201 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/04ea5305-dfae-4423-8732-a5edbee97000-service-ca-bundle\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.582286 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-serving-cert\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.582350 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.582383 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/891743f6-cf2a-499d-914c-003f9a0a6875-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: \"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 
19:32:03.582468 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.582543 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.582652 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-encryption-config\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.582988 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-client-ca\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.583506 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-policies\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.583777 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1710f9e2-c924-4fe6-b405-9aab5e81795f-auth-proxy-config\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.583939 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-config\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584039 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-config\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584059 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-encryption-config\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " 
pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584141 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584317 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584393 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584406 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-trusted-ca-bundle\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584448 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84bdm\" (UniqueName: \"kubernetes.io/projected/630defa6-81c8-4d9f-84bf-ef45e55be900-kube-api-access-84bdm\") pod \"dns-default-ckbgp\" (UID: \"630defa6-81c8-4d9f-84bf-ef45e55be900\") " pod="openshift-dns/dns-default-ckbgp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584460 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d60ab555-3c24-40c0-917b-3bed070c6ec5-serving-cert\") pod \"openshift-config-operator-7777fb866f-rvjw7\" (UID: \"d60ab555-3c24-40c0-917b-3bed070c6ec5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584617 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-audit-policies\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584642 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4cdh\" (UniqueName: \"kubernetes.io/projected/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-kube-api-access-c4cdh\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584695 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/dbed5156-bd14-449e-943a-488606ac49e2-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584744 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2c489cf-d96d-42fe-83df-7447ad03cf43-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mc7qr\" (UID: \"b2c489cf-d96d-42fe-83df-7447ad03cf43\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584811 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b9af469-c457-4818-a486-26fc2ca77b9a-serving-cert\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584883 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-image-import-ca\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584914 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2c489cf-d96d-42fe-83df-7447ad03cf43-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mc7qr\" (UID: \"b2c489cf-d96d-42fe-83df-7447ad03cf43\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.584973 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0dba9d00-8b2e-4271-a75c-16ceac76a6de-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-v6slm\" (UID: \"0dba9d00-8b2e-4271-a75c-16ceac76a6de\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585002 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-socket-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585034 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-mountpoint-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585056 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqxpm\" (UniqueName: \"kubernetes.io/projected/4ce6a756-7c72-45f6-abb8-96d9597b7429-kube-api-access-vqxpm\") pod 
\"collect-profiles-29413170-n5jt8\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585076 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9932bc0-7bd5-4054-9eb7-6f3e0849e422-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4fbcm\" (UID: \"f9932bc0-7bd5-4054-9eb7-6f3e0849e422\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585098 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8d00895f-3847-4601-8fe5-41f8fd32a47a-srv-cert\") pod \"catalog-operator-68c6474976-bg7xx\" (UID: \"8d00895f-3847-4601-8fe5-41f8fd32a47a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585350 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-client-ca\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585405 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585431 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-oauth-serving-cert\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585487 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/815db339-95a6-41d8-8572-9fab3b7c2030-etcd-service-ca\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585647 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/1710f9e2-c924-4fe6-b405-9aab5e81795f-machine-approver-tls\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585805 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-serving-cert\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 
19:32:03.585877 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2c489cf-d96d-42fe-83df-7447ad03cf43-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mc7qr\" (UID: \"b2c489cf-d96d-42fe-83df-7447ad03cf43\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.585987 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w86mh\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") " pod="openshift-marketplace/marketplace-operator-79b997595-w86mh"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.586408 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.586403 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-serving-cert\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.586543 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-oauth-serving-cert\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.586609 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-image-import-ca\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.587188 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.587859 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.588132 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b9af469-c457-4818-a486-26fc2ca77b9a-serving-cert\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.588189 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/891743f6-cf2a-499d-914c-003f9a0a6875-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: \"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.588653 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/dbed5156-bd14-449e-943a-488606ac49e2-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.589157 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b2c489cf-d96d-42fe-83df-7447ad03cf43-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mc7qr\" (UID: \"b2c489cf-d96d-42fe-83df-7447ad03cf43\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.589161 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-encryption-config\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.589251 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9932bc0-7bd5-4054-9eb7-6f3e0849e422-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4fbcm\" (UID: \"f9932bc0-7bd5-4054-9eb7-6f3e0849e422\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.589372 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cf10b43c-dac3-462c-b4d5-66b27b895743-metrics-tls\") pod \"dns-operator-744455d44c-n4ps5\" (UID: \"cf10b43c-dac3-462c-b4d5-66b27b895743\") " pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.589522 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.589727 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-oauth-config\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.589841 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.591533 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-serving-cert\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.601302 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.612139 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.617378 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.637345 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.658766 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.677234 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687518 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndrbs\" (UniqueName: \"kubernetes.io/projected/4c7cc56e-1da2-4fa4-a402-8b2d407caf39-kube-api-access-ndrbs\") pod \"package-server-manager-789f6589d5-gxng9\" (UID: \"4c7cc56e-1da2-4fa4-a402-8b2d407caf39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687614 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ecd5613-956e-4d47-beba-d572b9415562-config\") pod \"service-ca-operator-777779d784-n59pd\" (UID: \"6ecd5613-956e-4d47-beba-d572b9415562\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687653 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc2179db-f671-4331-b3ab-283c0fe68953-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-p6msx\" (UID: \"bc2179db-f671-4331-b3ab-283c0fe68953\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687676 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8d00895f-3847-4601-8fe5-41f8fd32a47a-profile-collector-cert\") pod \"catalog-operator-68c6474976-bg7xx\" (UID: \"8d00895f-3847-4601-8fe5-41f8fd32a47a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687696 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-tmpfs\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687726 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/815db339-95a6-41d8-8572-9fab3b7c2030-etcd-ca\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687764 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-plugins-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687785 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/630defa6-81c8-4d9f-84bf-ef45e55be900-metrics-tls\") pod \"dns-default-ckbgp\" (UID: \"630defa6-81c8-4d9f-84bf-ef45e55be900\") " pod="openshift-dns/dns-default-ckbgp"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687827 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plcgq\" (UniqueName: \"kubernetes.io/projected/75435454-50d3-4560-821e-a5b8c171652b-kube-api-access-plcgq\") pod \"control-plane-machine-set-operator-78cbb6b69f-7rwpc\" (UID: \"75435454-50d3-4560-821e-a5b8c171652b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687848 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w86mh\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") " pod="openshift-marketplace/marketplace-operator-79b997595-w86mh"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687868 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ecd5613-956e-4d47-beba-d572b9415562-serving-cert\") pod \"service-ca-operator-777779d784-n59pd\" (UID: \"6ecd5613-956e-4d47-beba-d572b9415562\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687927 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d3a49aea-7afb-4578-9717-58559d47a1fe-metrics-certs\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687953 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84bdm\" (UniqueName: \"kubernetes.io/projected/630defa6-81c8-4d9f-84bf-ef45e55be900-kube-api-access-84bdm\") pod \"dns-default-ckbgp\" (UID: \"630defa6-81c8-4d9f-84bf-ef45e55be900\") " pod="openshift-dns/dns-default-ckbgp"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.687990 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0dba9d00-8b2e-4271-a75c-16ceac76a6de-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-v6slm\" (UID: \"0dba9d00-8b2e-4271-a75c-16ceac76a6de\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688013 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-socket-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688033 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqxpm\" (UniqueName: \"kubernetes.io/projected/4ce6a756-7c72-45f6-abb8-96d9597b7429-kube-api-access-vqxpm\") pod \"collect-profiles-29413170-n5jt8\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688056 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-mountpoint-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688074 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8d00895f-3847-4601-8fe5-41f8fd32a47a-srv-cert\") pod \"catalog-operator-68c6474976-bg7xx\" (UID: \"8d00895f-3847-4601-8fe5-41f8fd32a47a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688093 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/815db339-95a6-41d8-8572-9fab3b7c2030-etcd-service-ca\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688115 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w86mh\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") " pod="openshift-marketplace/marketplace-operator-79b997595-w86mh"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688159 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d3a49aea-7afb-4578-9717-58559d47a1fe-service-ca-bundle\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688182 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-registration-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688204 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v562m\" (UniqueName: \"kubernetes.io/projected/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-kube-api-access-v562m\") pod \"marketplace-operator-79b997595-w86mh\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") " pod="openshift-marketplace/marketplace-operator-79b997595-w86mh"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688228 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/815db339-95a6-41d8-8572-9fab3b7c2030-config\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688267 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rjbc\" (UniqueName: \"kubernetes.io/projected/1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f-kube-api-access-2rjbc\") pod \"ingress-canary-5vmfh\" (UID: \"1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f\") " pod="openshift-ingress-canary/ingress-canary-5vmfh"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688312 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/630defa6-81c8-4d9f-84bf-ef45e55be900-config-volume\") pod \"dns-default-ckbgp\" (UID: \"630defa6-81c8-4d9f-84bf-ef45e55be900\") " pod="openshift-dns/dns-default-ckbgp"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688333 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc2179db-f671-4331-b3ab-283c0fe68953-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-p6msx\" (UID: \"bc2179db-f671-4331-b3ab-283c0fe68953\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688363 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4c7cc56e-1da2-4fa4-a402-8b2d407caf39-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-gxng9\" (UID: \"4c7cc56e-1da2-4fa4-a402-8b2d407caf39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688385 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d3a49aea-7afb-4578-9717-58559d47a1fe-default-certificate\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688434 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-csi-data-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688453 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ce6a756-7c72-45f6-abb8-96d9597b7429-config-volume\") pod \"collect-profiles-29413170-n5jt8\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688471 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc2179db-f671-4331-b3ab-283c0fe68953-config\") pod \"kube-controller-manager-operator-78b949d7b-p6msx\" (UID: \"bc2179db-f671-4331-b3ab-283c0fe68953\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688489 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/815db339-95a6-41d8-8572-9fab3b7c2030-serving-cert\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688508 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mwkd\" (UniqueName: \"kubernetes.io/projected/8d00895f-3847-4601-8fe5-41f8fd32a47a-kube-api-access-7mwkd\") pod \"catalog-operator-68c6474976-bg7xx\" (UID: \"8d00895f-3847-4601-8fe5-41f8fd32a47a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688528 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tknjj\" (UniqueName: \"kubernetes.io/projected/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-kube-api-access-tknjj\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688551 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/75435454-50d3-4560-821e-a5b8c171652b-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7rwpc\" (UID: \"75435454-50d3-4560-821e-a5b8c171652b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688602 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ce6a756-7c72-45f6-abb8-96d9597b7429-secret-volume\") pod \"collect-profiles-29413170-n5jt8\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688628 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcznk\" (UniqueName: \"kubernetes.io/projected/d3a49aea-7afb-4578-9717-58559d47a1fe-kube-api-access-pcznk\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688650 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bznwc\" (UniqueName: \"kubernetes.io/projected/815db339-95a6-41d8-8572-9fab3b7c2030-kube-api-access-bznwc\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688686 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x44xm\" (UniqueName: \"kubernetes.io/projected/0dba9d00-8b2e-4271-a75c-16ceac76a6de-kube-api-access-x44xm\") pod \"kube-storage-version-migrator-operator-b67b599dd-v6slm\" (UID: \"0dba9d00-8b2e-4271-a75c-16ceac76a6de\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688707 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f-cert\") pod \"ingress-canary-5vmfh\" (UID: \"1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f\") " pod="openshift-ingress-canary/ingress-canary-5vmfh"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688726 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/815db339-95a6-41d8-8572-9fab3b7c2030-etcd-client\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688765 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsvhh\" (UniqueName: \"kubernetes.io/projected/866b0506-2939-4cae-936e-a21d5040cb3f-kube-api-access-vsvhh\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688787 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-apiservice-cert\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688807 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc84c\" (UniqueName: \"kubernetes.io/projected/6ecd5613-956e-4d47-beba-d572b9415562-kube-api-access-nc84c\") pod \"service-ca-operator-777779d784-n59pd\" (UID: \"6ecd5613-956e-4d47-beba-d572b9415562\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688831 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d3a49aea-7afb-4578-9717-58559d47a1fe-stats-auth\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688849 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0dba9d00-8b2e-4271-a75c-16ceac76a6de-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-v6slm\" (UID: \"0dba9d00-8b2e-4271-a75c-16ceac76a6de\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.688869 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-webhook-cert\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.689657 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-tmpfs\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.690035 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-plugins-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.690339 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-socket-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.690440 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-mountpoint-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.691227 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d3a49aea-7afb-4578-9717-58559d47a1fe-service-ca-bundle\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.691297 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-registration-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.691826 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/866b0506-2939-4cae-936e-a21d5040cb3f-csi-data-dir\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.697901 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.698524 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d3a49aea-7afb-4578-9717-58559d47a1fe-stats-auth\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.717664 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.726524 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d3a49aea-7afb-4578-9717-58559d47a1fe-default-certificate\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.738032 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.758467 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.763955 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d3a49aea-7afb-4578-9717-58559d47a1fe-metrics-certs\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.777645 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.797684 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.817534 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.837116 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.857594 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.862437 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/630defa6-81c8-4d9f-84bf-ef45e55be900-config-volume\") pod \"dns-default-ckbgp\" (UID: \"630defa6-81c8-4d9f-84bf-ef45e55be900\") " pod="openshift-dns/dns-default-ckbgp"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.877972 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.883797 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/630defa6-81c8-4d9f-84bf-ef45e55be900-metrics-tls\") pod \"dns-default-ckbgp\" (UID: \"630defa6-81c8-4d9f-84bf-ef45e55be900\") " pod="openshift-dns/dns-default-ckbgp"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.897324 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.918020 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.937134 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.958161 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.978023 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 03 19:32:03 crc kubenswrapper[4916]: I1203 19:32:03.997790 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.018192 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.037855 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.057196 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.062012 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/815db339-95a6-41d8-8572-9fab3b7c2030-etcd-ca\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.078142 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.099288 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.107054 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/815db339-95a6-41d8-8572-9fab3b7c2030-serving-cert\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.118494 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.127416 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/815db339-95a6-41d8-8572-9fab3b7c2030-etcd-client\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.136899 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.141752 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/815db339-95a6-41d8-8572-9fab3b7c2030-etcd-service-ca\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.158193 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.177634 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.198428 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.203532 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/815db339-95a6-41d8-8572-9fab3b7c2030-config\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.217753 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.223508 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc2179db-f671-4331-b3ab-283c0fe68953-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-p6msx\" (UID: \"bc2179db-f671-4331-b3ab-283c0fe68953\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.237045 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.257187 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.276695 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.285329 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc2179db-f671-4331-b3ab-283c0fe68953-config\") pod \"kube-controller-manager-operator-78b949d7b-p6msx\" (UID: \"bc2179db-f671-4331-b3ab-283c0fe68953\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.316836 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.336930 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.356021 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.378046 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.386379 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/75435454-50d3-4560-821e-a5b8c171652b-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7rwpc\" (UID: \"75435454-50d3-4560-821e-a5b8c171652b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.397851 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.417787 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.437066 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.457054 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.477537 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.485327 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8d00895f-3847-4601-8fe5-41f8fd32a47a-profile-collector-cert\") pod \"catalog-operator-68c6474976-bg7xx\" (UID: \"8d00895f-3847-4601-8fe5-41f8fd32a47a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.486618 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ce6a756-7c72-45f6-abb8-96d9597b7429-secret-volume\") pod \"collect-profiles-29413170-n5jt8\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.495704 4916 request.go:700] Waited for 1.001990803s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.497770 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.517444 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.536810 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.546537 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f-cert\") pod \"ingress-canary-5vmfh\" (UID: \"1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f\") " pod="openshift-ingress-canary/ingress-canary-5vmfh"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.556852 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.573624 4916 secret.go:188] Couldn't get secret openshift-apiserver/etcd-client: failed to sync secret cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.573774 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-client podName:15f7ce9a-d2ff-40c4-b717-5b78ae4ab388 nodeName:}" failed. No retries permitted until 2025-12-03 19:32:05.073747777 +0000 UTC m=+141.036558053 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-client" (UniqueName: "kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-client") pod "apiserver-76f77b778f-87mhb" (UID: "15f7ce9a-d2ff-40c4-b717-5b78ae4ab388") : failed to sync secret cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.579959 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.580631 4916 configmap.go:193] Couldn't get configMap openshift-apiserver/etcd-serving-ca: failed to sync configmap cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.580716 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-serving-ca podName:15f7ce9a-d2ff-40c4-b717-5b78ae4ab388 nodeName:}" failed. No retries permitted until 2025-12-03 19:32:05.080689072 +0000 UTC m=+141.043499348 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-serving-ca" (UniqueName: "kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-serving-ca") pod "apiserver-76f77b778f-87mhb" (UID: "15f7ce9a-d2ff-40c4-b717-5b78ae4ab388") : failed to sync configmap cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.597021 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.605032 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4c7cc56e-1da2-4fa4-a402-8b2d407caf39-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-gxng9\" (UID: \"4c7cc56e-1da2-4fa4-a402-8b2d407caf39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.616927 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.623593 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-webhook-cert\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.628835 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-apiservice-cert\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.638023 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.657852 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.676965 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.683141 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ce6a756-7c72-45f6-abb8-96d9597b7429-config-volume\") pod \"collect-profiles-29413170-n5jt8\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8"
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.689873 4916 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.689952 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6ecd5613-956e-4d47-beba-d572b9415562-config podName:6ecd5613-956e-4d47-beba-d572b9415562 nodeName:}" failed. No retries permitted until 2025-12-03 19:32:05.189932551 +0000 UTC m=+141.152742817 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/6ecd5613-956e-4d47-beba-d572b9415562-config") pod "service-ca-operator-777779d784-n59pd" (UID: "6ecd5613-956e-4d47-beba-d572b9415562") : failed to sync configmap cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.690332 4916 configmap.go:193] Couldn't get configMap openshift-kube-storage-version-migrator-operator/config: failed to sync configmap cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.690366 4916 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.690422 4916 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.690381 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0dba9d00-8b2e-4271-a75c-16ceac76a6de-config podName:0dba9d00-8b2e-4271-a75c-16ceac76a6de nodeName:}" failed. No retries permitted until 2025-12-03 19:32:05.190371642 +0000 UTC m=+141.153181908 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/0dba9d00-8b2e-4271-a75c-16ceac76a6de-config") pod "kube-storage-version-migrator-operator-b67b599dd-v6slm" (UID: "0dba9d00-8b2e-4271-a75c-16ceac76a6de") : failed to sync configmap cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.690665 4916 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/catalog-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.690726 4916 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.690778 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6ecd5613-956e-4d47-beba-d572b9415562-serving-cert podName:6ecd5613-956e-4d47-beba-d572b9415562 nodeName:}" failed. No retries permitted until 2025-12-03 19:32:05.190453464 +0000 UTC m=+141.153263730 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/6ecd5613-956e-4d47-beba-d572b9415562-serving-cert") pod "service-ca-operator-777779d784-n59pd" (UID: "6ecd5613-956e-4d47-beba-d572b9415562") : failed to sync secret cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.690809 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-trusted-ca podName:ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c nodeName:}" failed. No retries permitted until 2025-12-03 19:32:05.190798223 +0000 UTC m=+141.153608489 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-trusted-ca") pod "marketplace-operator-79b997595-w86mh" (UID: "ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c") : failed to sync configmap cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.690824 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-operator-metrics podName:ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c nodeName:}" failed. No retries permitted until 2025-12-03 19:32:05.190818813 +0000 UTC m=+141.153629069 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-operator-metrics") pod "marketplace-operator-79b997595-w86mh" (UID: "ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c") : failed to sync secret cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.690845 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8d00895f-3847-4601-8fe5-41f8fd32a47a-srv-cert podName:8d00895f-3847-4601-8fe5-41f8fd32a47a nodeName:}" failed. No retries permitted until 2025-12-03 19:32:05.190838094 +0000 UTC m=+141.153648350 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/8d00895f-3847-4601-8fe5-41f8fd32a47a-srv-cert") pod "catalog-operator-68c6474976-bg7xx" (UID: "8d00895f-3847-4601-8fe5-41f8fd32a47a") : failed to sync secret cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.692422 4916 secret.go:188] Couldn't get secret openshift-kube-storage-version-migrator-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: E1203 19:32:04.692543 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0dba9d00-8b2e-4271-a75c-16ceac76a6de-serving-cert podName:0dba9d00-8b2e-4271-a75c-16ceac76a6de nodeName:}" failed. No retries permitted until 2025-12-03 19:32:05.192511316 +0000 UTC m=+141.155321602 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/0dba9d00-8b2e-4271-a75c-16ceac76a6de-serving-cert") pod "kube-storage-version-migrator-operator-b67b599dd-v6slm" (UID: "0dba9d00-8b2e-4271-a75c-16ceac76a6de") : failed to sync secret cache: timed out waiting for the condition
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.697261 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.717855 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.737785 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.757312 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.777371 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.796706 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.816762 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.837618 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.857443 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.878132 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.897994 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.918084 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.937879 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.957162 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.978410 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 03 19:32:04 crc kubenswrapper[4916]: I1203 19:32:04.998324 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.025959 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.037714 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.057386 4916 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.078192 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.097910 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.113270 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-client\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.113466 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-serving-ca\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.117641 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.137650 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.157731 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.177773 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.197340 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.214391 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w86mh\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") " pod="openshift-marketplace/marketplace-operator-79b997595-w86mh"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.214625 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0dba9d00-8b2e-4271-a75c-16ceac76a6de-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-v6slm\" (UID: \"0dba9d00-8b2e-4271-a75c-16ceac76a6de\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.214669 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ecd5613-956e-4d47-beba-d572b9415562-config\") pod \"service-ca-operator-777779d784-n59pd\" (UID: \"6ecd5613-956e-4d47-beba-d572b9415562\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.214751 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w86mh\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") " pod="openshift-marketplace/marketplace-operator-79b997595-w86mh"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.214770 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ecd5613-956e-4d47-beba-d572b9415562-serving-cert\") pod \"service-ca-operator-777779d784-n59pd\" (UID: \"6ecd5613-956e-4d47-beba-d572b9415562\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.214833 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0dba9d00-8b2e-4271-a75c-16ceac76a6de-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-v6slm\" (UID: \"0dba9d00-8b2e-4271-a75c-16ceac76a6de\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.214875 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8d00895f-3847-4601-8fe5-41f8fd32a47a-srv-cert\") pod \"catalog-operator-68c6474976-bg7xx\" (UID: \"8d00895f-3847-4601-8fe5-41f8fd32a47a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.216095 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0dba9d00-8b2e-4271-a75c-16ceac76a6de-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-v6slm\" (UID: \"0dba9d00-8b2e-4271-a75c-16ceac76a6de\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.217394 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w86mh\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") " pod="openshift-marketplace/marketplace-operator-79b997595-w86mh"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.217493 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ecd5613-956e-4d47-beba-d572b9415562-config\") pod \"service-ca-operator-777779d784-n59pd\" (UID: \"6ecd5613-956e-4d47-beba-d572b9415562\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.218402 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.219099 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0dba9d00-8b2e-4271-a75c-16ceac76a6de-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-v6slm\" (UID: \"0dba9d00-8b2e-4271-a75c-16ceac76a6de\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.220968 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w86mh\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") " pod="openshift-marketplace/marketplace-operator-79b997595-w86mh"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.223730 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ecd5613-956e-4d47-beba-d572b9415562-serving-cert\") pod \"service-ca-operator-777779d784-n59pd\" (UID: \"6ecd5613-956e-4d47-beba-d572b9415562\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.223865 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8d00895f-3847-4601-8fe5-41f8fd32a47a-srv-cert\") pod \"catalog-operator-68c6474976-bg7xx\" (UID: \"8d00895f-3847-4601-8fe5-41f8fd32a47a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.237587 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.257431 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.280189 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.346751 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsdnf\" (UniqueName: \"kubernetes.io/projected/1d18bd86-a58f-451c-90c0-9fa9834c6d77-kube-api-access-nsdnf\") pod \"console-f9d7485db-cdztl\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " pod="openshift-console/console-f9d7485db-cdztl"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.367692 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4c7q\" (UniqueName: \"kubernetes.io/projected/dbed5156-bd14-449e-943a-488606ac49e2-kube-api-access-z4c7q\") pod \"machine-api-operator-5694c8668f-6ngfp\" (UID: \"dbed5156-bd14-449e-943a-488606ac49e2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp"
Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.400034 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pp4gs\" (UniqueName: \"kubernetes.io/projected/f8ba73c5-9167-4354-b1d2-896a40e52e1c-kube-api-access-pp4gs\") pod \"controller-manager-879f6c89f-5w58b\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b"
Dec 03 19:32:05 crc
kubenswrapper[4916]: I1203 19:32:05.401904 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdjj6\" (UniqueName: \"kubernetes.io/projected/b2c489cf-d96d-42fe-83df-7447ad03cf43-kube-api-access-pdjj6\") pod \"openshift-apiserver-operator-796bbdcf4f-mc7qr\" (UID: \"b2c489cf-d96d-42fe-83df-7447ad03cf43\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.413348 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c2g4\" (UniqueName: \"kubernetes.io/projected/61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26-kube-api-access-7c2g4\") pod \"cluster-samples-operator-665b6dd947-gg2sb\" (UID: \"61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.442647 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdqzw\" (UniqueName: \"kubernetes.io/projected/bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c-kube-api-access-xdqzw\") pod \"apiserver-7bbb656c7d-hhkb7\" (UID: \"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.461488 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2mpf\" (UniqueName: \"kubernetes.io/projected/d83669b5-21c4-48ad-99f9-5abccbf369a3-kube-api-access-b2mpf\") pod \"downloads-7954f5f757-dsf48\" (UID: \"d83669b5-21c4-48ad-99f9-5abccbf369a3\") " pod="openshift-console/downloads-7954f5f757-dsf48" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.475258 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kldfn\" (UniqueName: \"kubernetes.io/projected/1710f9e2-c924-4fe6-b405-9aab5e81795f-kube-api-access-kldfn\") pod \"machine-approver-56656f9798-gc74f\" (UID: \"1710f9e2-c924-4fe6-b405-9aab5e81795f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.481707 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.499523 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdw7m\" (UniqueName: \"kubernetes.io/projected/04ea5305-dfae-4423-8732-a5edbee97000-kube-api-access-vdw7m\") pod \"authentication-operator-69f744f599-tzrmv\" (UID: \"04ea5305-dfae-4423-8732-a5edbee97000\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.502215 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.511386 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsj5h\" (UniqueName: \"kubernetes.io/projected/8b9af469-c457-4818-a486-26fc2ca77b9a-kube-api-access-vsj5h\") pod \"route-controller-manager-6576b87f9c-54gpr\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.516540 4916 request.go:700] Waited for 1.93772537s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift/token Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.543054 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpckw\" (UniqueName: \"kubernetes.io/projected/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-kube-api-access-fpckw\") pod \"oauth-openshift-558db77b4-9xdsf\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.555742 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/891743f6-cf2a-499d-914c-003f9a0a6875-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: \"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.558809 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.576340 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmp7t\" (UniqueName: \"kubernetes.io/projected/f9932bc0-7bd5-4054-9eb7-6f3e0849e422-kube-api-access-dmp7t\") pod \"openshift-controller-manager-operator-756b6f6bc6-4fbcm\" (UID: \"f9932bc0-7bd5-4054-9eb7-6f3e0849e422\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.585127 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.592276 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.599861 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.600115 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv42w\" (UniqueName: \"kubernetes.io/projected/891743f6-cf2a-499d-914c-003f9a0a6875-kube-api-access-vv42w\") pod \"cluster-image-registry-operator-dc59b4c8b-gkhs6\" (UID: \"891743f6-cf2a-499d-914c-003f9a0a6875\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.608456 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.614912 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.616627 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s7gk\" (UniqueName: \"kubernetes.io/projected/cf10b43c-dac3-462c-b4d5-66b27b895743-kube-api-access-6s7gk\") pod \"dns-operator-744455d44c-n4ps5\" (UID: \"cf10b43c-dac3-462c-b4d5-66b27b895743\") " pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.626913 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.635976 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtrxt\" (UniqueName: \"kubernetes.io/projected/28b8555a-3084-43be-9d3b-18fd0d993a0d-kube-api-access-mtrxt\") pod \"console-operator-58897d9998-lcv2z\" (UID: \"28b8555a-3084-43be-9d3b-18fd0d993a0d\") " pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.639190 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.646776 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.655063 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.655504 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drqpp\" (UniqueName: \"kubernetes.io/projected/d60ab555-3c24-40c0-917b-3bed070c6ec5-kube-api-access-drqpp\") pod \"openshift-config-operator-7777fb866f-rvjw7\" (UID: \"d60ab555-3c24-40c0-917b-3bed070c6ec5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.668841 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-dsf48" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.671238 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.722697 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plcgq\" (UniqueName: \"kubernetes.io/projected/75435454-50d3-4560-821e-a5b8c171652b-kube-api-access-plcgq\") pod \"control-plane-machine-set-operator-78cbb6b69f-7rwpc\" (UID: \"75435454-50d3-4560-821e-a5b8c171652b\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.734828 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84bdm\" (UniqueName: \"kubernetes.io/projected/630defa6-81c8-4d9f-84bf-ef45e55be900-kube-api-access-84bdm\") pod \"dns-default-ckbgp\" (UID: \"630defa6-81c8-4d9f-84bf-ef45e55be900\") " pod="openshift-dns/dns-default-ckbgp" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.737763 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.755942 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqxpm\" (UniqueName: \"kubernetes.io/projected/4ce6a756-7c72-45f6-abb8-96d9597b7429-kube-api-access-vqxpm\") pod \"collect-profiles-29413170-n5jt8\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.779818 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-ckbgp" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.783888 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v562m\" (UniqueName: \"kubernetes.io/projected/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-kube-api-access-v562m\") pod \"marketplace-operator-79b997595-w86mh\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") " pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.804192 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rjbc\" (UniqueName: \"kubernetes.io/projected/1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f-kube-api-access-2rjbc\") pod \"ingress-canary-5vmfh\" (UID: \"1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f\") " pod="openshift-ingress-canary/ingress-canary-5vmfh" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.821830 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc2179db-f671-4331-b3ab-283c0fe68953-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-p6msx\" (UID: \"bc2179db-f671-4331-b3ab-283c0fe68953\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.823449 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.832871 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcznk\" (UniqueName: \"kubernetes.io/projected/d3a49aea-7afb-4578-9717-58559d47a1fe-kube-api-access-pcznk\") pod \"router-default-5444994796-d2kts\" (UID: \"d3a49aea-7afb-4578-9717-58559d47a1fe\") " pod="openshift-ingress/router-default-5444994796-d2kts" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.850758 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.865058 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndrbs\" (UniqueName: \"kubernetes.io/projected/4c7cc56e-1da2-4fa4-a402-8b2d407caf39-kube-api-access-ndrbs\") pod \"package-server-manager-789f6589d5-gxng9\" (UID: \"4c7cc56e-1da2-4fa4-a402-8b2d407caf39\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.870850 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mwkd\" (UniqueName: \"kubernetes.io/projected/8d00895f-3847-4601-8fe5-41f8fd32a47a-kube-api-access-7mwkd\") pod \"catalog-operator-68c6474976-bg7xx\" (UID: \"8d00895f-3847-4601-8fe5-41f8fd32a47a\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.872377 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-5vmfh" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.886186 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.888943 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x44xm\" (UniqueName: \"kubernetes.io/projected/0dba9d00-8b2e-4271-a75c-16ceac76a6de-kube-api-access-x44xm\") pod \"kube-storage-version-migrator-operator-b67b599dd-v6slm\" (UID: \"0dba9d00-8b2e-4271-a75c-16ceac76a6de\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.898816 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsvhh\" (UniqueName: \"kubernetes.io/projected/866b0506-2939-4cae-936e-a21d5040cb3f-kube-api-access-vsvhh\") pod \"csi-hostpathplugin-6rv69\" (UID: \"866b0506-2939-4cae-936e-a21d5040cb3f\") " pod="hostpath-provisioner/csi-hostpathplugin-6rv69" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.916612 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bznwc\" (UniqueName: \"kubernetes.io/projected/815db339-95a6-41d8-8572-9fab3b7c2030-kube-api-access-bznwc\") pod \"etcd-operator-b45778765-xltwc\" (UID: \"815db339-95a6-41d8-8572-9fab3b7c2030\") " pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.918099 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.933041 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.947172 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.947915 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.953383 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc84c\" (UniqueName: \"kubernetes.io/projected/6ecd5613-956e-4d47-beba-d572b9415562-kube-api-access-nc84c\") pod \"service-ca-operator-777779d784-n59pd\" (UID: \"6ecd5613-956e-4d47-beba-d572b9415562\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.957944 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.963700 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.965798 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tknjj\" (UniqueName: \"kubernetes.io/projected/7ecc55b8-0cbd-4637-9479-f7f0286d0a0b-kube-api-access-tknjj\") pod \"packageserver-d55dfcdfc-w4s7m\" (UID: \"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.969492 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4cdh\" (UniqueName: \"kubernetes.io/projected/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-kube-api-access-c4cdh\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.978843 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.979810 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-serving-ca\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.983953 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-6rv69" Dec 03 19:32:05 crc kubenswrapper[4916]: I1203 19:32:05.993382 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.017126 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.032216 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/15f7ce9a-d2ff-40c4-b717-5b78ae4ab388-etcd-client\") pod \"apiserver-76f77b778f-87mhb\" (UID: \"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388\") " pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.040362 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.061926 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-d2kts" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.114707 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.131750 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1b662a6-86cc-484a-91be-0b69b3b7b933-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qv6fr\" (UID: \"e1b662a6-86cc-484a-91be-0b69b3b7b933\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.131805 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/70c66f69-60b7-4b6f-af3f-2ad0b6cb515c-certs\") pod \"machine-config-server-rzbqz\" (UID: \"70c66f69-60b7-4b6f-af3f-2ad0b6cb515c\") " pod="openshift-machine-config-operator/machine-config-server-rzbqz" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.131834 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-registry-certificates\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.131853 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7bad711e-046f-470f-9a18-0cbfadd7f05d-trusted-ca\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.131882 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzbm4\" (UniqueName: \"kubernetes.io/projected/7bad711e-046f-470f-9a18-0cbfadd7f05d-kube-api-access-vzbm4\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.131896 4916 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f33e30c0-88a9-4055-be81-c0ae38f2f540-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qn47d\" (UID: \"f33e30c0-88a9-4055-be81-c0ae38f2f540\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.131911 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/70c66f69-60b7-4b6f-af3f-2ad0b6cb515c-node-bootstrap-token\") pod \"machine-config-server-rzbqz\" (UID: \"70c66f69-60b7-4b6f-af3f-2ad0b6cb515c\") " pod="openshift-machine-config-operator/machine-config-server-rzbqz" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.131945 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/151b79c1-f797-460a-9883-5af28efabd61-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.131979 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/151b79c1-f797-460a-9883-5af28efabd61-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.131996 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phv29\" (UniqueName: \"kubernetes.io/projected/8369d329-fdce-4cd7-ab48-6c50b2a53fea-kube-api-access-phv29\") pod \"service-ca-9c57cc56f-rs2tx\" (UID: \"8369d329-fdce-4cd7-ab48-6c50b2a53fea\") " pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132011 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e1b662a6-86cc-484a-91be-0b69b3b7b933-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qv6fr\" (UID: \"e1b662a6-86cc-484a-91be-0b69b3b7b933\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132027 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8l5t\" (UniqueName: \"kubernetes.io/projected/75a73af8-9e9b-4ade-985b-1b4418ffc955-kube-api-access-g8l5t\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132053 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/75a73af8-9e9b-4ade-985b-1b4418ffc955-proxy-tls\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 
19:32:06.132070 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-lk8q6\" (UID: \"ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132102 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/75a73af8-9e9b-4ade-985b-1b4418ffc955-auth-proxy-config\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132116 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwxbj\" (UniqueName: \"kubernetes.io/projected/33932edb-c741-4dac-a93a-ab01a3eae54d-kube-api-access-zwxbj\") pod \"migrator-59844c95c7-9d5sk\" (UID: \"33932edb-c741-4dac-a93a-ab01a3eae54d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132132 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhkvf\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-kube-api-access-rhkvf\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132164 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/80ccd9f5-c714-4ee7-9bd1-58363e2d1526-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jjrjl\" (UID: \"80ccd9f5-c714-4ee7-9bd1-58363e2d1526\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132207 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbhrl\" (UniqueName: \"kubernetes.io/projected/70c66f69-60b7-4b6f-af3f-2ad0b6cb515c-kube-api-access-fbhrl\") pod \"machine-config-server-rzbqz\" (UID: \"70c66f69-60b7-4b6f-af3f-2ad0b6cb515c\") " pod="openshift-machine-config-operator/machine-config-server-rzbqz" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132232 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e6db190a-b5a9-42da-bad1-63b1429be0a9-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-zlg5c\" (UID: \"e6db190a-b5a9-42da-bad1-63b1429be0a9\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132254 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-trusted-ca\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc 
kubenswrapper[4916]: I1203 19:32:06.132285 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-registry-tls\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132366 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132392 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e6db190a-b5a9-42da-bad1-63b1429be0a9-proxy-tls\") pod \"machine-config-controller-84d6567774-zlg5c\" (UID: \"e6db190a-b5a9-42da-bad1-63b1429be0a9\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132408 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1b662a6-86cc-484a-91be-0b69b3b7b933-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qv6fr\" (UID: \"e1b662a6-86cc-484a-91be-0b69b3b7b933\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132473 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f33e30c0-88a9-4055-be81-c0ae38f2f540-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qn47d\" (UID: \"f33e30c0-88a9-4055-be81-c0ae38f2f540\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132508 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7bad711e-046f-470f-9a18-0cbfadd7f05d-metrics-tls\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132594 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/75a73af8-9e9b-4ade-985b-1b4418ffc955-images\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132645 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-bound-sa-token\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc 
kubenswrapper[4916]: I1203 19:32:06.132703 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8369d329-fdce-4cd7-ab48-6c50b2a53fea-signing-key\") pod \"service-ca-9c57cc56f-rs2tx\" (UID: \"8369d329-fdce-4cd7-ab48-6c50b2a53fea\") " pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132723 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/80ccd9f5-c714-4ee7-9bd1-58363e2d1526-srv-cert\") pod \"olm-operator-6b444d44fb-jjrjl\" (UID: \"80ccd9f5-c714-4ee7-9bd1-58363e2d1526\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132741 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq8dg\" (UniqueName: \"kubernetes.io/projected/80ccd9f5-c714-4ee7-9bd1-58363e2d1526-kube-api-access-dq8dg\") pod \"olm-operator-6b444d44fb-jjrjl\" (UID: \"80ccd9f5-c714-4ee7-9bd1-58363e2d1526\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132764 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f33e30c0-88a9-4055-be81-c0ae38f2f540-config\") pod \"kube-apiserver-operator-766d6c64bb-qn47d\" (UID: \"f33e30c0-88a9-4055-be81-c0ae38f2f540\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132814 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmxcf\" (UniqueName: \"kubernetes.io/projected/ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb-kube-api-access-wmxcf\") pod \"multus-admission-controller-857f4d67dd-lk8q6\" (UID: \"ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132831 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8369d329-fdce-4cd7-ab48-6c50b2a53fea-signing-cabundle\") pod \"service-ca-9c57cc56f-rs2tx\" (UID: \"8369d329-fdce-4cd7-ab48-6c50b2a53fea\") " pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132846 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wms7\" (UniqueName: \"kubernetes.io/projected/e6db190a-b5a9-42da-bad1-63b1429be0a9-kube-api-access-8wms7\") pod \"machine-config-controller-84d6567774-zlg5c\" (UID: \"e6db190a-b5a9-42da-bad1-63b1429be0a9\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.132861 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7bad711e-046f-470f-9a18-0cbfadd7f05d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: E1203 19:32:06.136331 
4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:06.636317487 +0000 UTC m=+142.599127753 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.159872 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.199236 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" Dec 03 19:32:06 crc kubenswrapper[4916]: W1203 19:32:06.225925 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3a49aea_7afb_4578_9717_58559d47a1fe.slice/crio-21f7797ab9023ef36a71f5dfdc2c36206d0c5eb1cb2da9561606441461a87130 WatchSource:0}: Error finding container 21f7797ab9023ef36a71f5dfdc2c36206d0c5eb1cb2da9561606441461a87130: Status 404 returned error can't find the container with id 21f7797ab9023ef36a71f5dfdc2c36206d0c5eb1cb2da9561606441461a87130 Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.234115 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.234424 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e6db190a-b5a9-42da-bad1-63b1429be0a9-proxy-tls\") pod \"machine-config-controller-84d6567774-zlg5c\" (UID: \"e6db190a-b5a9-42da-bad1-63b1429be0a9\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.234456 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1b662a6-86cc-484a-91be-0b69b3b7b933-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qv6fr\" (UID: \"e1b662a6-86cc-484a-91be-0b69b3b7b933\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.234522 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f33e30c0-88a9-4055-be81-c0ae38f2f540-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qn47d\" (UID: \"f33e30c0-88a9-4055-be81-c0ae38f2f540\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" Dec 03 19:32:06 crc kubenswrapper[4916]: E1203 19:32:06.235163 4916 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:06.735141472 +0000 UTC m=+142.697951738 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.235989 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7bad711e-046f-470f-9a18-0cbfadd7f05d-metrics-tls\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.236017 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/75a73af8-9e9b-4ade-985b-1b4418ffc955-images\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.236152 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-bound-sa-token\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.236301 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8369d329-fdce-4cd7-ab48-6c50b2a53fea-signing-key\") pod \"service-ca-9c57cc56f-rs2tx\" (UID: \"8369d329-fdce-4cd7-ab48-6c50b2a53fea\") " pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.236319 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/80ccd9f5-c714-4ee7-9bd1-58363e2d1526-srv-cert\") pod \"olm-operator-6b444d44fb-jjrjl\" (UID: \"80ccd9f5-c714-4ee7-9bd1-58363e2d1526\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.236336 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq8dg\" (UniqueName: \"kubernetes.io/projected/80ccd9f5-c714-4ee7-9bd1-58363e2d1526-kube-api-access-dq8dg\") pod \"olm-operator-6b444d44fb-jjrjl\" (UID: \"80ccd9f5-c714-4ee7-9bd1-58363e2d1526\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.236467 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f33e30c0-88a9-4055-be81-c0ae38f2f540-config\") pod \"kube-apiserver-operator-766d6c64bb-qn47d\" (UID: \"f33e30c0-88a9-4055-be81-c0ae38f2f540\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.236600 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmxcf\" (UniqueName: \"kubernetes.io/projected/ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb-kube-api-access-wmxcf\") pod \"multus-admission-controller-857f4d67dd-lk8q6\" (UID: \"ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.236622 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8369d329-fdce-4cd7-ab48-6c50b2a53fea-signing-cabundle\") pod \"service-ca-9c57cc56f-rs2tx\" (UID: \"8369d329-fdce-4cd7-ab48-6c50b2a53fea\") " pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.236640 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wms7\" (UniqueName: \"kubernetes.io/projected/e6db190a-b5a9-42da-bad1-63b1429be0a9-kube-api-access-8wms7\") pod \"machine-config-controller-84d6567774-zlg5c\" (UID: \"e6db190a-b5a9-42da-bad1-63b1429be0a9\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.236656 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7bad711e-046f-470f-9a18-0cbfadd7f05d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.237785 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f33e30c0-88a9-4055-be81-c0ae38f2f540-config\") pod \"kube-apiserver-operator-766d6c64bb-qn47d\" (UID: \"f33e30c0-88a9-4055-be81-c0ae38f2f540\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.239435 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/75a73af8-9e9b-4ade-985b-1b4418ffc955-images\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.237201 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1b662a6-86cc-484a-91be-0b69b3b7b933-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qv6fr\" (UID: \"e1b662a6-86cc-484a-91be-0b69b3b7b933\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.239759 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/8369d329-fdce-4cd7-ab48-6c50b2a53fea-signing-cabundle\") pod \"service-ca-9c57cc56f-rs2tx\" (UID: \"8369d329-fdce-4cd7-ab48-6c50b2a53fea\") " pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241023 4916 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1b662a6-86cc-484a-91be-0b69b3b7b933-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qv6fr\" (UID: \"e1b662a6-86cc-484a-91be-0b69b3b7b933\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241057 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/70c66f69-60b7-4b6f-af3f-2ad0b6cb515c-certs\") pod \"machine-config-server-rzbqz\" (UID: \"70c66f69-60b7-4b6f-af3f-2ad0b6cb515c\") " pod="openshift-machine-config-operator/machine-config-server-rzbqz" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241146 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-registry-certificates\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241181 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7bad711e-046f-470f-9a18-0cbfadd7f05d-trusted-ca\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241210 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzbm4\" (UniqueName: \"kubernetes.io/projected/7bad711e-046f-470f-9a18-0cbfadd7f05d-kube-api-access-vzbm4\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241228 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f33e30c0-88a9-4055-be81-c0ae38f2f540-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qn47d\" (UID: \"f33e30c0-88a9-4055-be81-c0ae38f2f540\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241267 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/70c66f69-60b7-4b6f-af3f-2ad0b6cb515c-node-bootstrap-token\") pod \"machine-config-server-rzbqz\" (UID: \"70c66f69-60b7-4b6f-af3f-2ad0b6cb515c\") " pod="openshift-machine-config-operator/machine-config-server-rzbqz" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241290 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/151b79c1-f797-460a-9883-5af28efabd61-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241359 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/151b79c1-f797-460a-9883-5af28efabd61-ca-trust-extracted\") 
pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241379 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e1b662a6-86cc-484a-91be-0b69b3b7b933-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qv6fr\" (UID: \"e1b662a6-86cc-484a-91be-0b69b3b7b933\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241415 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8l5t\" (UniqueName: \"kubernetes.io/projected/75a73af8-9e9b-4ade-985b-1b4418ffc955-kube-api-access-g8l5t\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241435 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phv29\" (UniqueName: \"kubernetes.io/projected/8369d329-fdce-4cd7-ab48-6c50b2a53fea-kube-api-access-phv29\") pod \"service-ca-9c57cc56f-rs2tx\" (UID: \"8369d329-fdce-4cd7-ab48-6c50b2a53fea\") " pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241498 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/75a73af8-9e9b-4ade-985b-1b4418ffc955-proxy-tls\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241549 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-lk8q6\" (UID: \"ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241675 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/75a73af8-9e9b-4ade-985b-1b4418ffc955-auth-proxy-config\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241712 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwxbj\" (UniqueName: \"kubernetes.io/projected/33932edb-c741-4dac-a93a-ab01a3eae54d-kube-api-access-zwxbj\") pod \"migrator-59844c95c7-9d5sk\" (UID: \"33932edb-c741-4dac-a93a-ab01a3eae54d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241769 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhkvf\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-kube-api-access-rhkvf\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241823 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/80ccd9f5-c714-4ee7-9bd1-58363e2d1526-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jjrjl\" (UID: \"80ccd9f5-c714-4ee7-9bd1-58363e2d1526\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.241892 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbhrl\" (UniqueName: \"kubernetes.io/projected/70c66f69-60b7-4b6f-af3f-2ad0b6cb515c-kube-api-access-fbhrl\") pod \"machine-config-server-rzbqz\" (UID: \"70c66f69-60b7-4b6f-af3f-2ad0b6cb515c\") " pod="openshift-machine-config-operator/machine-config-server-rzbqz" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.242656 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e6db190a-b5a9-42da-bad1-63b1429be0a9-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-zlg5c\" (UID: \"e6db190a-b5a9-42da-bad1-63b1429be0a9\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.242729 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/80ccd9f5-c714-4ee7-9bd1-58363e2d1526-srv-cert\") pod \"olm-operator-6b444d44fb-jjrjl\" (UID: \"80ccd9f5-c714-4ee7-9bd1-58363e2d1526\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.242797 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-trusted-ca\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.242868 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7bad711e-046f-470f-9a18-0cbfadd7f05d-metrics-tls\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.243108 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-registry-tls\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.243869 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7bad711e-046f-470f-9a18-0cbfadd7f05d-trusted-ca\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.244430 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-registry-certificates\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.245223 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/75a73af8-9e9b-4ade-985b-1b4418ffc955-auth-proxy-config\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.246907 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-trusted-ca\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.248953 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e6db190a-b5a9-42da-bad1-63b1429be0a9-proxy-tls\") pod \"machine-config-controller-84d6567774-zlg5c\" (UID: \"e6db190a-b5a9-42da-bad1-63b1429be0a9\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.249913 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/151b79c1-f797-460a-9883-5af28efabd61-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.250103 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/8369d329-fdce-4cd7-ab48-6c50b2a53fea-signing-key\") pod \"service-ca-9c57cc56f-rs2tx\" (UID: \"8369d329-fdce-4cd7-ab48-6c50b2a53fea\") " pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.250420 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e6db190a-b5a9-42da-bad1-63b1429be0a9-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-zlg5c\" (UID: \"e6db190a-b5a9-42da-bad1-63b1429be0a9\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.252785 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1b662a6-86cc-484a-91be-0b69b3b7b933-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qv6fr\" (UID: \"e1b662a6-86cc-484a-91be-0b69b3b7b933\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.252815 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f33e30c0-88a9-4055-be81-c0ae38f2f540-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qn47d\" (UID: \"f33e30c0-88a9-4055-be81-c0ae38f2f540\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.252874 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/80ccd9f5-c714-4ee7-9bd1-58363e2d1526-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jjrjl\" (UID: \"80ccd9f5-c714-4ee7-9bd1-58363e2d1526\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.254352 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/70c66f69-60b7-4b6f-af3f-2ad0b6cb515c-certs\") pod \"machine-config-server-rzbqz\" (UID: \"70c66f69-60b7-4b6f-af3f-2ad0b6cb515c\") " pod="openshift-machine-config-operator/machine-config-server-rzbqz" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.254643 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/75a73af8-9e9b-4ade-985b-1b4418ffc955-proxy-tls\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.255029 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/151b79c1-f797-460a-9883-5af28efabd61-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.255509 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-lk8q6\" (UID: \"ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.256835 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/70c66f69-60b7-4b6f-af3f-2ad0b6cb515c-node-bootstrap-token\") pod \"machine-config-server-rzbqz\" (UID: \"70c66f69-60b7-4b6f-af3f-2ad0b6cb515c\") " pod="openshift-machine-config-operator/machine-config-server-rzbqz" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.258745 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-registry-tls\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.283763 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-bound-sa-token\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.303075 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/7bad711e-046f-470f-9a18-0cbfadd7f05d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.343004 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmxcf\" (UniqueName: \"kubernetes.io/projected/ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb-kube-api-access-wmxcf\") pod \"multus-admission-controller-857f4d67dd-lk8q6\" (UID: \"ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.344163 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: E1203 19:32:06.344492 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:06.844480454 +0000 UTC m=+142.807290720 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.348756 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq8dg\" (UniqueName: \"kubernetes.io/projected/80ccd9f5-c714-4ee7-9bd1-58363e2d1526-kube-api-access-dq8dg\") pod \"olm-operator-6b444d44fb-jjrjl\" (UID: \"80ccd9f5-c714-4ee7-9bd1-58363e2d1526\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.368289 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb"] Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.370670 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f33e30c0-88a9-4055-be81-c0ae38f2f540-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qn47d\" (UID: \"f33e30c0-88a9-4055-be81-c0ae38f2f540\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.373330 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7"] Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.378860 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wms7\" (UniqueName: \"kubernetes.io/projected/e6db190a-b5a9-42da-bad1-63b1429be0a9-kube-api-access-8wms7\") pod \"machine-config-controller-84d6567774-zlg5c\" (UID: \"e6db190a-b5a9-42da-bad1-63b1429be0a9\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.386009 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.399401 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e1b662a6-86cc-484a-91be-0b69b3b7b933-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qv6fr\" (UID: \"e1b662a6-86cc-484a-91be-0b69b3b7b933\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.415146 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbhrl\" (UniqueName: \"kubernetes.io/projected/70c66f69-60b7-4b6f-af3f-2ad0b6cb515c-kube-api-access-fbhrl\") pod \"machine-config-server-rzbqz\" (UID: \"70c66f69-60b7-4b6f-af3f-2ad0b6cb515c\") " pod="openshift-machine-config-operator/machine-config-server-rzbqz" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.431455 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzbm4\" (UniqueName: \"kubernetes.io/projected/7bad711e-046f-470f-9a18-0cbfadd7f05d-kube-api-access-vzbm4\") pod \"ingress-operator-5b745b69d9-tqd7x\" (UID: \"7bad711e-046f-470f-9a18-0cbfadd7f05d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.447804 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:06 crc kubenswrapper[4916]: E1203 19:32:06.448369 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:06.948343957 +0000 UTC m=+142.911154223 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.453822 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwxbj\" (UniqueName: \"kubernetes.io/projected/33932edb-c741-4dac-a93a-ab01a3eae54d-kube-api-access-zwxbj\") pod \"migrator-59844c95c7-9d5sk\" (UID: \"33932edb-c741-4dac-a93a-ab01a3eae54d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.459204 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.472392 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhkvf\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-kube-api-access-rhkvf\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.497853 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phv29\" (UniqueName: \"kubernetes.io/projected/8369d329-fdce-4cd7-ab48-6c50b2a53fea-kube-api-access-phv29\") pod \"service-ca-9c57cc56f-rs2tx\" (UID: \"8369d329-fdce-4cd7-ab48-6c50b2a53fea\") " pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.508490 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-d2kts" event={"ID":"d3a49aea-7afb-4578-9717-58559d47a1fe","Type":"ContainerStarted","Data":"21f7797ab9023ef36a71f5dfdc2c36206d0c5eb1cb2da9561606441461a87130"} Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.508860 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.509857 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" event={"ID":"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c","Type":"ContainerStarted","Data":"2e4c23ffa988d6f0ad22de8a0b8a8ede812701046055999b42d606aa69b7ad64"} Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.517243 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" event={"ID":"1710f9e2-c924-4fe6-b405-9aab5e81795f","Type":"ContainerStarted","Data":"9f85257c76e5086b021729261aefc34e418824a6ab021f1c2ede33ceebec6ea2"} Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.523104 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8l5t\" (UniqueName: \"kubernetes.io/projected/75a73af8-9e9b-4ade-985b-1b4418ffc955-kube-api-access-g8l5t\") pod \"machine-config-operator-74547568cd-l5n96\" (UID: \"75a73af8-9e9b-4ade-985b-1b4418ffc955\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.529588 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.552589 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: E1203 19:32:06.552900 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-03 19:32:07.052889088 +0000 UTC m=+143.015699354 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.604838 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.618948 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-rzbqz" Dec 03 19:32:06 crc kubenswrapper[4916]: W1203 19:32:06.640899 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70c66f69_60b7_4b6f_af3f_2ad0b6cb515c.slice/crio-25d1007267953f498fb6e0222b674620df14c470860384865b448459c20bf6ca WatchSource:0}: Error finding container 25d1007267953f498fb6e0222b674620df14c470860384865b448459c20bf6ca: Status 404 returned error can't find the container with id 25d1007267953f498fb6e0222b674620df14c470860384865b448459c20bf6ca Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.653348 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:06 crc kubenswrapper[4916]: E1203 19:32:06.653958 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:07.153939121 +0000 UTC m=+143.116749387 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.654462 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.669433 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.671529 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5w58b"] Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.682529 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6ngfp"] Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.696623 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.741202 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk" Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.755694 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: E1203 19:32:06.756073 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:07.2560572 +0000 UTC m=+143.218867466 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.858424 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:06 crc kubenswrapper[4916]: E1203 19:32:06.858913 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:07.358897578 +0000 UTC m=+143.321707844 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:06 crc kubenswrapper[4916]: I1203 19:32:06.960882 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:06 crc kubenswrapper[4916]: E1203 19:32:06.961145 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:07.461130751 +0000 UTC m=+143.423941017 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.061601 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.061926 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:07.561912137 +0000 UTC m=+143.524722403 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.162789 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.163471 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:07.663460212 +0000 UTC m=+143.626270468 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.267339 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.267534 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:07.76750965 +0000 UTC m=+143.730319916 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.267874 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.268297 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:07.76828521 +0000 UTC m=+143.731095476 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.271378 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.303433 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-tzrmv"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.318607 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9xdsf"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.327352 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr"] Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.327879 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04ea5305_dfae_4423_8732_a5edbee97000.slice/crio-3140dc8c12bfb56c55766f272b98f5f66f6a919208a859658b220ee86bda5f1a WatchSource:0}: Error finding container 3140dc8c12bfb56c55766f272b98f5f66f6a919208a859658b220ee86bda5f1a: Status 404 returned error can't find the container with id 3140dc8c12bfb56c55766f272b98f5f66f6a919208a859658b220ee86bda5f1a Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.332738 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb69fe1de_4b0b_4f4a_b1f8_db7be29f3067.slice/crio-30a9968685d5a1f4264e7a0808a2f27d596625e2f01f80fdd3be3cde3740a1b5 WatchSource:0}: Error finding container 30a9968685d5a1f4264e7a0808a2f27d596625e2f01f80fdd3be3cde3740a1b5: Status 404 returned error can't find the container with id 
30a9968685d5a1f4264e7a0808a2f27d596625e2f01f80fdd3be3cde3740a1b5 Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.341533 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.369237 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.369327 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:07.869308152 +0000 UTC m=+143.832118418 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.369534 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.369798 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:07.869790904 +0000 UTC m=+143.832601170 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.404679 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-n4ps5"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.437249 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.440667 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-cdztl"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.443017 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-5vmfh"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.447657 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.470044 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.470451 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:07.970436517 +0000 UTC m=+143.933246783 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.544681 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" event={"ID":"1710f9e2-c924-4fe6-b405-9aab5e81795f","Type":"ContainerStarted","Data":"4c5af42471178afc8a70644aa2c297675a60e3d7ac713eab9a8d67be67fb289e"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.544719 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" event={"ID":"1710f9e2-c924-4fe6-b405-9aab5e81795f","Type":"ContainerStarted","Data":"cada5f03b73fe9c1344fd68bc6197cc1fcf20b7d82702dcbb8d5613ee488011f"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.546521 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5vmfh" event={"ID":"1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f","Type":"ContainerStarted","Data":"d0f6f32a71515c99b6159accb74f43c34a3d76146a85f8e5286d53391cd2405a"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.548152 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" event={"ID":"8b9af469-c457-4818-a486-26fc2ca77b9a","Type":"ContainerStarted","Data":"943c2390d0b4e417d4f4c7e8bcf3ae5bf28705708cff7cdb6c7263016fd6e7f7"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.551000 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" event={"ID":"04ea5305-dfae-4423-8732-a5edbee97000","Type":"ContainerStarted","Data":"3140dc8c12bfb56c55766f272b98f5f66f6a919208a859658b220ee86bda5f1a"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.551856 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" event={"ID":"b2c489cf-d96d-42fe-83df-7447ad03cf43","Type":"ContainerStarted","Data":"25d40bdd46c9741fc69c0808cdac2817fd9fc4a0fd5f026efb926b0f87df6b14"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.553494 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" event={"ID":"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067","Type":"ContainerStarted","Data":"30a9968685d5a1f4264e7a0808a2f27d596625e2f01f80fdd3be3cde3740a1b5"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.554983 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" event={"ID":"dbed5156-bd14-449e-943a-488606ac49e2","Type":"ContainerStarted","Data":"28e6781312dec2b1f76d4c6003b21b7d8f20072675d737bed5fb5d39f84db206"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.555019 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" event={"ID":"dbed5156-bd14-449e-943a-488606ac49e2","Type":"ContainerStarted","Data":"8994991dfb2aa237a2eb3842653cd8d3eb3b65cb63c83570184d3a1b04457b60"} Dec 03 19:32:07 crc 
kubenswrapper[4916]: I1203 19:32:07.561438 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-d2kts" event={"ID":"d3a49aea-7afb-4578-9717-58559d47a1fe","Type":"ContainerStarted","Data":"55c0d068b809dc07c5e41895d04d55b6c93a0624f4e54fa3764d95c2a8b6bb35"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.562436 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx" event={"ID":"bc2179db-f671-4331-b3ab-283c0fe68953","Type":"ContainerStarted","Data":"c4016847771a8792963eb70966f454011c57457b72310d72a095ce3ee95062e5"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.566125 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" event={"ID":"61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26","Type":"ContainerStarted","Data":"3510103d0f0e705de182d156bac5c178965472437dd6401082850ddd2e982f2f"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.566164 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" event={"ID":"61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26","Type":"ContainerStarted","Data":"2de98041220a1051ac2a29216aac8085613cb123992cb1054bc0bd093e1c21d5"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.567015 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" event={"ID":"f8ba73c5-9167-4354-b1d2-896a40e52e1c","Type":"ContainerStarted","Data":"96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.567033 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" event={"ID":"f8ba73c5-9167-4354-b1d2-896a40e52e1c","Type":"ContainerStarted","Data":"ccb6938a8bc125fe4d1ed78e4441fd2c69be00436484eb868548feffa33e1766"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.567261 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.568746 4916 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-5w58b container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.568779 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" podUID="f8ba73c5-9167-4354-b1d2-896a40e52e1c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.571108 4916 generic.go:334] "Generic (PLEG): container finished" podID="bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c" containerID="0e8d6c1b8887e4ca513f20f4bcf3901dc392df546cc65b17fca65d93223d9031" exitCode=0 Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.571156 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" 
event={"ID":"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c","Type":"ContainerDied","Data":"0e8d6c1b8887e4ca513f20f4bcf3901dc392df546cc65b17fca65d93223d9031"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.571402 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.572022 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.072011583 +0000 UTC m=+144.034821849 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.573437 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" event={"ID":"4ce6a756-7c72-45f6-abb8-96d9597b7429","Type":"ContainerStarted","Data":"f55b89a70d1d826e7bb91d872d069afbbf9d6fc01608ddc9284a02c65d070ec4"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.574120 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5" event={"ID":"cf10b43c-dac3-462c-b4d5-66b27b895743","Type":"ContainerStarted","Data":"76c2724758b567341facf2b8aa7e0bda1987c4caca5ec950ce7e786d4bd0b439"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.575236 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cdztl" event={"ID":"1d18bd86-a58f-451c-90c0-9fa9834c6d77","Type":"ContainerStarted","Data":"c97b80f38f1baf4123412641cf871628ad32b1a15459c1155323663edb39c956"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.577738 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" event={"ID":"f9932bc0-7bd5-4054-9eb7-6f3e0849e422","Type":"ContainerStarted","Data":"040532c854484a12b5f00172a114c01f2cac5953ce132323d3033022df083b67"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.579275 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-rzbqz" event={"ID":"70c66f69-60b7-4b6f-af3f-2ad0b6cb515c","Type":"ContainerStarted","Data":"29643efc57404375a91e14109022d59b2dd518128e9253dc3d09f8dd08d01f30"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.579332 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-rzbqz" event={"ID":"70c66f69-60b7-4b6f-af3f-2ad0b6cb515c","Type":"ContainerStarted","Data":"25d1007267953f498fb6e0222b674620df14c470860384865b448459c20bf6ca"} Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.672330 4916 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.673429 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.173414594 +0000 UTC m=+144.136224860 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.715776 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w86mh"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.723729 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-6rv69"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.725797 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.731090 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.738470 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-87mhb"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.740684 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-lcv2z"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.749330 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-xltwc"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.751758 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m"] Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.759100 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod866b0506_2939_4cae_936e_a21d5040cb3f.slice/crio-fabfde8a5e7184b72f1cd69321fb3762fbae7c2039bd16b501dc4e875ee7c50a WatchSource:0}: Error finding container fabfde8a5e7184b72f1cd69321fb3762fbae7c2039bd16b501dc4e875ee7c50a: Status 404 returned error can't find the container with id fabfde8a5e7184b72f1cd69321fb3762fbae7c2039bd16b501dc4e875ee7c50a Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.773885 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.774537 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.274518718 +0000 UTC m=+144.237328994 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.783914 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-n59pd"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.786500 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm"] Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.790478 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce6deb9f_2ba1_4efc_96ec_ebd3437b9b5c.slice/crio-dfca687e34d310659babf8ccabbbd7afe1656a9487815dcb9ac6f3e9be67cd1d WatchSource:0}: Error finding container dfca687e34d310659babf8ccabbbd7afe1656a9487815dcb9ac6f3e9be67cd1d: Status 404 returned error can't find the container with id dfca687e34d310659babf8ccabbbd7afe1656a9487815dcb9ac6f3e9be67cd1d Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.790910 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ckbgp"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.793714 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-dsf48"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.794268 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.798681 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.803272 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.805428 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x"] Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.807827 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15f7ce9a_d2ff_40c4_b717_5b78ae4ab388.slice/crio-5094ce4f21ec07e6659fa4ee74cf1be0ddebdab906ed20d87123054de8e65fcf WatchSource:0}: Error finding container 5094ce4f21ec07e6659fa4ee74cf1be0ddebdab906ed20d87123054de8e65fcf: Status 404 returned error can't find the container with id 5094ce4f21ec07e6659fa4ee74cf1be0ddebdab906ed20d87123054de8e65fcf Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.808533 4916 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d"] Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.809850 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod815db339_95a6_41d8_8572_9fab3b7c2030.slice/crio-2677353fc68b632a7070a1925ed883be5b83815139b66be387d8b91e4031d85e WatchSource:0}: Error finding container 2677353fc68b632a7070a1925ed883be5b83815139b66be387d8b91e4031d85e: Status 404 returned error can't find the container with id 2677353fc68b632a7070a1925ed883be5b83815139b66be387d8b91e4031d85e Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.821034 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.830734 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.835110 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.865904 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96"] Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.874709 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.875130 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.37511313 +0000 UTC m=+144.337923396 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.905858 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-gc74f" podStartSLOduration=123.905838203 podStartE2EDuration="2m3.905838203s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:07.905829703 +0000 UTC m=+143.868639989" watchObservedRunningTime="2025-12-03 19:32:07.905838203 +0000 UTC m=+143.868648469" Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.924483 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d00895f_3847_4601_8fe5_41f8fd32a47a.slice/crio-545c25cb7ccf4250b74ab348c3a0b51e01964760c508ec2ab6ab0ca5c57d1a00 WatchSource:0}: Error finding container 545c25cb7ccf4250b74ab348c3a0b51e01964760c508ec2ab6ab0ca5c57d1a00: Status 404 returned error can't find the container with id 545c25cb7ccf4250b74ab348c3a0b51e01964760c508ec2ab6ab0ca5c57d1a00 Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.926331 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c7cc56e_1da2_4fa4_a402_8b2d407caf39.slice/crio-1f0a7ec27befe292bd3e50e7e22f2fcc75fa91eb2dec6a0001210cb059a6d1b2 WatchSource:0}: Error finding container 1f0a7ec27befe292bd3e50e7e22f2fcc75fa91eb2dec6a0001210cb059a6d1b2: Status 404 returned error can't find the container with id 1f0a7ec27befe292bd3e50e7e22f2fcc75fa91eb2dec6a0001210cb059a6d1b2 Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.941750 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd60ab555_3c24_40c0_917b_3bed070c6ec5.slice/crio-5a3873df0060a2433e26e3eeb3a14a2e5acbeb0e9a8e0cc8f5c99b3ae98e7ea9 WatchSource:0}: Error finding container 5a3873df0060a2433e26e3eeb3a14a2e5acbeb0e9a8e0cc8f5c99b3ae98e7ea9: Status 404 returned error can't find the container with id 5a3873df0060a2433e26e3eeb3a14a2e5acbeb0e9a8e0cc8f5c99b3ae98e7ea9 Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.952791 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-d2kts" podStartSLOduration=123.952774574 podStartE2EDuration="2m3.952774574s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:07.950453725 +0000 UTC m=+143.913263991" watchObservedRunningTime="2025-12-03 19:32:07.952774574 +0000 UTC m=+143.915584840" Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.957193 4916 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod75435454_50d3_4560_821e_a5b8c171652b.slice/crio-4f3f489557a080df7091de0bf4f32d93877cbeb6b1e5f5a31a35162cb47d974a WatchSource:0}: Error finding container 4f3f489557a080df7091de0bf4f32d93877cbeb6b1e5f5a31a35162cb47d974a: Status 404 returned error can't find the container with id 4f3f489557a080df7091de0bf4f32d93877cbeb6b1e5f5a31a35162cb47d974a Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.961320 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rs2tx"] Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.972483 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod75a73af8_9e9b_4ade_985b_1b4418ffc955.slice/crio-8ebac47e49da358eaabccb4f7aaed464fc9b4b1ac39a0d7367e105e64f552153 WatchSource:0}: Error finding container 8ebac47e49da358eaabccb4f7aaed464fc9b4b1ac39a0d7367e105e64f552153: Status 404 returned error can't find the container with id 8ebac47e49da358eaabccb4f7aaed464fc9b4b1ac39a0d7367e105e64f552153 Dec 03 19:32:07 crc kubenswrapper[4916]: W1203 19:32:07.976268 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8369d329_fdce_4cd7_ab48_6c50b2a53fea.slice/crio-c92ae4a899d270fcbb960a07a3787fc2111a3621a86b851509bed23f7af99907 WatchSource:0}: Error finding container c92ae4a899d270fcbb960a07a3787fc2111a3621a86b851509bed23f7af99907: Status 404 returned error can't find the container with id c92ae4a899d270fcbb960a07a3787fc2111a3621a86b851509bed23f7af99907 Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.976747 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:07 crc kubenswrapper[4916]: E1203 19:32:07.977064 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.477052445 +0000 UTC m=+144.439862711 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:07 crc kubenswrapper[4916]: I1203 19:32:07.983827 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-lk8q6"] Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.003495 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk"] Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.033193 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" podStartSLOduration=124.033172187 podStartE2EDuration="2m4.033172187s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:08.031603748 +0000 UTC m=+143.994414034" watchObservedRunningTime="2025-12-03 19:32:08.033172187 +0000 UTC m=+143.995982453" Dec 03 19:32:08 crc kubenswrapper[4916]: W1203 19:32:08.038244 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podecf7536e_fc5a_4993_b4c9_e0a560dd1cbb.slice/crio-e8d1909c1f897c1aefdea66c5e776ef51e401fa25a9422f61963d061cf4bb7c0 WatchSource:0}: Error finding container e8d1909c1f897c1aefdea66c5e776ef51e401fa25a9422f61963d061cf4bb7c0: Status 404 returned error can't find the container with id e8d1909c1f897c1aefdea66c5e776ef51e401fa25a9422f61963d061cf4bb7c0 Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.063793 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-d2kts" Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.068015 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:08 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:08 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:08 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.068213 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.068926 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-rzbqz" podStartSLOduration=5.068915156 podStartE2EDuration="5.068915156s" podCreationTimestamp="2025-12-03 19:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:08.068433374 +0000 UTC m=+144.031243640" watchObservedRunningTime="2025-12-03 
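
--- editor's note: the router startup-probe output above is the standard aggregated healthz format: one "[+]check ok" / "[-]check failed" line per sub-check, followed by an overall verdict. A sketch of reading such an endpoint the way the kubelet prober does, assuming a hypothetical URL (the real port and path come from the router pod's spec, not from this log):

    # Sketch: fetch an aggregated healthz endpoint and list failing checks.
    import urllib.request, urllib.error

    def failing_checks(url="http://10.217.0.10:1936/healthz/ready"):  # hypothetical URL
        try:
            with urllib.request.urlopen(url, timeout=2) as resp:
                code, body = resp.getcode(), resp.read().decode()
        except urllib.error.HTTPError as e:  # a 500 still carries the check list
            code, body = e.code, e.read().decode()
        return code, [ln for ln in body.splitlines() if ln.startswith("[-]")]
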
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.077691 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.078681 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.578658522 +0000 UTC m=+144.541468778 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.079557 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.080114 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.580099878 +0000 UTC m=+144.542910144 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.180929 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.181080 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.681059848 +0000 UTC m=+144.643870114 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.181240 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.181620 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.681608892 +0000 UTC m=+144.644419158 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.281914 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.282312 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.782294036 +0000 UTC m=+144.745104302 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.383694 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.384451 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.884436026 +0000 UTC m=+144.847246292 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.486163 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.486493 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:08.986473924 +0000 UTC m=+144.949284190 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.593797 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.594074 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:09.094062421 +0000 UTC m=+145.056872687 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.630724 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6rv69" event={"ID":"866b0506-2939-4cae-936e-a21d5040cb3f","Type":"ContainerStarted","Data":"fabfde8a5e7184b72f1cd69321fb3762fbae7c2039bd16b501dc4e875ee7c50a"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.632858 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9" event={"ID":"4c7cc56e-1da2-4fa4-a402-8b2d407caf39","Type":"ContainerStarted","Data":"1f0a7ec27befe292bd3e50e7e22f2fcc75fa91eb2dec6a0001210cb059a6d1b2"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.683252 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" event={"ID":"dbed5156-bd14-449e-943a-488606ac49e2","Type":"ContainerStarted","Data":"a6137b5b60c0a41a14cec4bdb43f1b5853ec09a0539d6e7b4bdbe6de42eaba5f"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.695283 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.695632 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:09.195615426 +0000 UTC m=+145.158425692 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
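
--- editor's note: every mount/unmount attempt above is rejected with "No retries permitted until <t> (durationBeforeRetry 500ms)": the volume reconciler re-queues the same operation on each pass (roughly every 100ms here), but nestedpendingoperations gates it until the backoff deadline passes. A simplified sketch of that gating, not kubelet's actual code (kubelet grows the delay exponentially; this slice of the log only ever shows the initial 500ms):

    # Sketch: backoff-deadline retry gate in the spirit of the records above.
    import time

    class PendingOperation:
        def __init__(self, backoff=0.5):            # 500ms, as logged
            self.backoff, self.next_allowed = backoff, 0.0

        def try_run(self, op):
            now = time.monotonic()
            if now < self.next_allowed:
                return False                         # "No retries permitted until ..."
            try:
                op()                                 # e.g. MountDevice / TearDownAt
                return True
            except Exception:
                self.next_allowed = now + self.backoff
                return False
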
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.699514 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" event={"ID":"61c1ea8d-f3ba-451c-9bb8-ac64d38f2e26","Type":"ContainerStarted","Data":"dfd4f12400502fd062629b27bab356bd236013ce81a88331a18a5053d8106084"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.705385 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" event={"ID":"815db339-95a6-41d8-8572-9fab3b7c2030","Type":"ContainerStarted","Data":"2677353fc68b632a7070a1925ed883be5b83815139b66be387d8b91e4031d85e"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.723766 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" event={"ID":"7bad711e-046f-470f-9a18-0cbfadd7f05d","Type":"ContainerStarted","Data":"4ccd51478d3533ec382e35a61f0f66e436e7938e5210a652159b05c59b1dba85"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.752712 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" event={"ID":"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c","Type":"ContainerStarted","Data":"5d3191276f07e1bfdd5cfb302237b14bf400246ac78007d6cd08968951944787"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.752757 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" event={"ID":"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c","Type":"ContainerStarted","Data":"dfca687e34d310659babf8ccabbbd7afe1656a9487815dcb9ac6f3e9be67cd1d"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.753072 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh"
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.756662 4916 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-w86mh container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body=
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.756706 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" podUID="ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused"
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.796807 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.797092 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:09.29708018 +0000 UTC m=+145.259890436 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.802199 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm" event={"ID":"0dba9d00-8b2e-4271-a75c-16ceac76a6de","Type":"ContainerStarted","Data":"8a2cc96f495456362b3bcc2b7fc7e68f61d367bd7d3af3d54597f63df9f8bbe2"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.816582 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" event={"ID":"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067","Type":"ContainerStarted","Data":"faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.817616 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf"
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.819396 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd" event={"ID":"6ecd5613-956e-4d47-beba-d572b9415562","Type":"ContainerStarted","Data":"3a1ea92cabee43645f7246e40246087c10fe869b54629552d0ae5960737f9801"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.820998 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" event={"ID":"891743f6-cf2a-499d-914c-003f9a0a6875","Type":"ContainerStarted","Data":"ac5f88285b76ee56b53ba8f72265456c543037fc86b9e88d10f4d7ebabd0cabb"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.829849 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" event={"ID":"4ce6a756-7c72-45f6-abb8-96d9597b7429","Type":"ContainerStarted","Data":"e7e03eda1780dc42fa5b5def994d10f4439d824dedf641dedd995fc87813df52"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.833752 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" event={"ID":"e6db190a-b5a9-42da-bad1-63b1429be0a9","Type":"ContainerStarted","Data":"e6673f35b73210567d256c970c2aa3ed7566b6f35774e0696cf3b50d6421da90"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.834889 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-lcv2z" event={"ID":"28b8555a-3084-43be-9d3b-18fd0d993a0d","Type":"ContainerStarted","Data":"cee4dd8abd730cd2662d1bd72379da6534101fa3d30515e8321a036e2e7037e1"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.837173 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" event={"ID":"80ccd9f5-c714-4ee7-9bd1-58363e2d1526","Type":"ContainerStarted","Data":"d1cdcbe20c1f898515c0542e90ef373be119579530a435d3472623cc9b73a2fc"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.856079 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" event={"ID":"f9932bc0-7bd5-4054-9eb7-6f3e0849e422","Type":"ContainerStarted","Data":"fee0ff12e2888fde6afdf47a6e615d8f38dd919e687d57e34e801332a2bb9799"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.857188 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" event={"ID":"ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb","Type":"ContainerStarted","Data":"e8d1909c1f897c1aefdea66c5e776ef51e401fa25a9422f61963d061cf4bb7c0"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.858771 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc" event={"ID":"75435454-50d3-4560-821e-a5b8c171652b","Type":"ContainerStarted","Data":"4f3f489557a080df7091de0bf4f32d93877cbeb6b1e5f5a31a35162cb47d974a"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.863587 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" event={"ID":"8b9af469-c457-4818-a486-26fc2ca77b9a","Type":"ContainerStarted","Data":"695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.863829 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr"
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.865290 4916 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-54gpr container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body=
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.865358 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" podUID="8b9af469-c457-4818-a486-26fc2ca77b9a" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused"
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.868194 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" event={"ID":"8369d329-fdce-4cd7-ab48-6c50b2a53fea","Type":"ContainerStarted","Data":"c92ae4a899d270fcbb960a07a3787fc2111a3621a86b851509bed23f7af99907"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.870939 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-5vmfh" event={"ID":"1ef33a57-9423-4fa9-86e1-0e7ad24f5e1f","Type":"ContainerStarted","Data":"ae29010b64644cff7b830d6f26677eb0e1a2206c8c0ab52848deea578fb4bfa2"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.872446 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" event={"ID":"f33e30c0-88a9-4055-be81-c0ae38f2f540","Type":"ContainerStarted","Data":"cb5cd8768ff65e1a64c923b369a2fe5dff20cf6440e00388deabee36deb1a4e5"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.874626 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cdztl" event={"ID":"1d18bd86-a58f-451c-90c0-9fa9834c6d77","Type":"ContainerStarted","Data":"870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.876622 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" event={"ID":"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b","Type":"ContainerStarted","Data":"fcec9b842b8ab445cf483e70a03ada1a4aaf351c11a78674e6e9211ccc2923fe"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.879173 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" event={"ID":"b2c489cf-d96d-42fe-83df-7447ad03cf43","Type":"ContainerStarted","Data":"bc879ffb8217724e6b569399d77032e47fb57e202d9dda8e793dea881b891918"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.880416 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" event={"ID":"d60ab555-3c24-40c0-917b-3bed070c6ec5","Type":"ContainerStarted","Data":"5a3873df0060a2433e26e3eeb3a14a2e5acbeb0e9a8e0cc8f5c99b3ae98e7ea9"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.881597 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-87mhb" event={"ID":"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388","Type":"ContainerStarted","Data":"5094ce4f21ec07e6659fa4ee74cf1be0ddebdab906ed20d87123054de8e65fcf"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.883324 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx" event={"ID":"bc2179db-f671-4331-b3ab-283c0fe68953","Type":"ContainerStarted","Data":"9c23bc87951b45dab59a82dafc52c846b3c0fd19f4c617c12b7e1274eeece5d7"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.884154 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" event={"ID":"75a73af8-9e9b-4ade-985b-1b4418ffc955","Type":"ContainerStarted","Data":"8ebac47e49da358eaabccb4f7aaed464fc9b4b1ac39a0d7367e105e64f552153"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.885551 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk" event={"ID":"33932edb-c741-4dac-a93a-ab01a3eae54d","Type":"ContainerStarted","Data":"e873b45a89214b3de947231b6818ddcce3813225d0c957701bdeddde2ef11f61"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.887281 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" event={"ID":"e1b662a6-86cc-484a-91be-0b69b3b7b933","Type":"ContainerStarted","Data":"71dc16e294dbb186e4241b2fd261b7ea302aaf36696bdf01ce3bb3c99eefba86"}
Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.887311 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" event={"ID":"e1b662a6-86cc-484a-91be-0b69b3b7b933","Type":"ContainerStarted","Data":"0c03b46473e007062fe3c133ef8918550524374a3e1e08bc35cd4ec7d1469dd2"}
event={"ID":"e1b662a6-86cc-484a-91be-0b69b3b7b933","Type":"ContainerStarted","Data":"0c03b46473e007062fe3c133ef8918550524374a3e1e08bc35cd4ec7d1469dd2"} Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.892671 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" event={"ID":"04ea5305-dfae-4423-8732-a5edbee97000","Type":"ContainerStarted","Data":"d9b14a25ba07ee80a6059843de4be9222943a8a71c7d3814e33fbdafc42442cf"} Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.894660 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dsf48" event={"ID":"d83669b5-21c4-48ad-99f9-5abccbf369a3","Type":"ContainerStarted","Data":"0974e91e04594a7462b3b1623041f073ffa693950a02d6092e8f198b83b5b541"} Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.897395 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.897648 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:09.397616319 +0000 UTC m=+145.360426595 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.897699 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:08 crc kubenswrapper[4916]: E1203 19:32:08.898290 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:09.398277376 +0000 UTC m=+145.361087642 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.899261 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" event={"ID":"8d00895f-3847-4601-8fe5-41f8fd32a47a","Type":"ContainerStarted","Data":"545c25cb7ccf4250b74ab348c3a0b51e01964760c508ec2ab6ab0ca5c57d1a00"} Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.901620 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ckbgp" event={"ID":"630defa6-81c8-4d9f-84bf-ef45e55be900","Type":"ContainerStarted","Data":"f55fe778cae5f23e8c3ebcdfe10e7c3d1a909deacaa9382d151e3749d2c7874c"} Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.901655 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ckbgp" event={"ID":"630defa6-81c8-4d9f-84bf-ef45e55be900","Type":"ContainerStarted","Data":"8eb6cf485b10b6091be8869fa202a395edccc243b96f2721eb150a7200dbf8f5"} Dec 03 19:32:08 crc kubenswrapper[4916]: I1203 19:32:08.963748 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.004688 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.004757 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:09.504741815 +0000 UTC m=+145.467552081 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.005743 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.025360 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:09.525291022 +0000 UTC m=+145.488101298 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.071835 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:09 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:09 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:09 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.071887 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.123850 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.124208 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:09.624192671 +0000 UTC m=+145.587002937 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.225626 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.226340 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:09.726326391 +0000 UTC m=+145.689136667 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.326759 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.327349 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:09.827333142 +0000 UTC m=+145.790143408 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.356638 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.429224 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.429500 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:09.929487973 +0000 UTC m=+145.892298239 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.438118 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" podStartSLOduration=125.4381004 podStartE2EDuration="2m5.4381004s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.436093009 +0000 UTC m=+145.398903275" watchObservedRunningTime="2025-12-03 19:32:09.4381004 +0000 UTC m=+145.400910666" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.459359 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-6ngfp" podStartSLOduration=125.459343634 podStartE2EDuration="2m5.459343634s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.454921773 +0000 UTC m=+145.417732039" watchObservedRunningTime="2025-12-03 19:32:09.459343634 +0000 UTC m=+145.422153900" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.528490 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gg2sb" podStartSLOduration=125.528466584 podStartE2EDuration="2m5.528466584s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-12-03 19:32:09.502851119 +0000 UTC m=+145.465661385" watchObservedRunningTime="2025-12-03 19:32:09.528466584 +0000 UTC m=+145.491276840" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.528982 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-tzrmv" podStartSLOduration=125.528975246 podStartE2EDuration="2m5.528975246s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.528247758 +0000 UTC m=+145.491058024" watchObservedRunningTime="2025-12-03 19:32:09.528975246 +0000 UTC m=+145.491785512" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.529842 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.530178 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:10.030159626 +0000 UTC m=+145.992969892 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.611687 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-5vmfh" podStartSLOduration=6.611666337 podStartE2EDuration="6.611666337s" podCreationTimestamp="2025-12-03 19:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.606719963 +0000 UTC m=+145.569530229" watchObservedRunningTime="2025-12-03 19:32:09.611666337 +0000 UTC m=+145.574476613" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.613294 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" podStartSLOduration=125.613284048 podStartE2EDuration="2m5.613284048s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.565883945 +0000 UTC m=+145.528694221" watchObservedRunningTime="2025-12-03 19:32:09.613284048 +0000 UTC m=+145.576094314" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.631297 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: 
\"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.631901 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:10.131885406 +0000 UTC m=+146.094695672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.633859 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" podStartSLOduration=125.633848755 podStartE2EDuration="2m5.633848755s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.632585854 +0000 UTC m=+145.595396130" watchObservedRunningTime="2025-12-03 19:32:09.633848755 +0000 UTC m=+145.596659021" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.733070 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.733401 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:10.23338791 +0000 UTC m=+146.196198176 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.769386 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mc7qr" podStartSLOduration=125.769371576 podStartE2EDuration="2m5.769371576s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.732608951 +0000 UTC m=+145.695419217" watchObservedRunningTime="2025-12-03 19:32:09.769371576 +0000 UTC m=+145.732181842" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.798592 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-cdztl" podStartSLOduration=125.798554749 podStartE2EDuration="2m5.798554749s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.771309994 +0000 UTC m=+145.734120260" watchObservedRunningTime="2025-12-03 19:32:09.798554749 +0000 UTC m=+145.761365015" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.799006 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qv6fr" podStartSLOduration=125.79900131 podStartE2EDuration="2m5.79900131s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.796925318 +0000 UTC m=+145.759735574" watchObservedRunningTime="2025-12-03 19:32:09.79900131 +0000 UTC m=+145.761811576" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.834371 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.834739 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:10.334723059 +0000 UTC m=+146.297533325 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.876531 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" podStartSLOduration=125.876513291 podStartE2EDuration="2m5.876513291s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.874796898 +0000 UTC m=+145.837607154" watchObservedRunningTime="2025-12-03 19:32:09.876513291 +0000 UTC m=+145.839323557" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.878533 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p6msx" podStartSLOduration=125.878523951 podStartE2EDuration="2m5.878523951s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.853223385 +0000 UTC m=+145.816033651" watchObservedRunningTime="2025-12-03 19:32:09.878523951 +0000 UTC m=+145.841334217" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.918802 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4fbcm" podStartSLOduration=125.918788015 podStartE2EDuration="2m5.918788015s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:09.916558308 +0000 UTC m=+145.879368574" watchObservedRunningTime="2025-12-03 19:32:09.918788015 +0000 UTC m=+145.881598281" Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.943728 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.943851 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:10.443836575 +0000 UTC m=+146.406646841 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:09 crc kubenswrapper[4916]: I1203 19:32:09.943881 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:09 crc kubenswrapper[4916]: E1203 19:32:09.944165 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:10.444156783 +0000 UTC m=+146.406967049 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.000719 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" event={"ID":"f33e30c0-88a9-4055-be81-c0ae38f2f540","Type":"ContainerStarted","Data":"26b4fe28c8bceafefadbb49a9534a52bcc041d00ace3ea2df4c8b1f235394883"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.005042 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc" event={"ID":"75435454-50d3-4560-821e-a5b8c171652b","Type":"ContainerStarted","Data":"b4e895af500ad4d066fc22d0cbcba35a8fa51ab9b1d8602c004cf9bc4a3bc1ec"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.008311 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" event={"ID":"7ecc55b8-0cbd-4637-9479-f7f0286d0a0b","Type":"ContainerStarted","Data":"b1bbedb22ec60fbd04646109da02bb575556c7af8ee0851759d85b307a489b95"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.009089 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.012491 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm" event={"ID":"0dba9d00-8b2e-4271-a75c-16ceac76a6de","Type":"ContainerStarted","Data":"e84843a0eb8de7e29bc0e771d791ede262cc38828898b81a0dcdff902cde2a0d"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.023675 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7rwpc" podStartSLOduration=126.023661863 podStartE2EDuration="2m6.023661863s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.021244493 +0000 UTC m=+145.984054759" watchObservedRunningTime="2025-12-03 19:32:10.023661863 +0000 UTC m=+145.986472129" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.033498 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9" event={"ID":"4c7cc56e-1da2-4fa4-a402-8b2d407caf39","Type":"ContainerStarted","Data":"cdb232af5f25a8bd24abd2fcb342e8231bf176c24c3442cf51f0c68ef74ad17f"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.038817 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-v6slm" podStartSLOduration=126.038800614 podStartE2EDuration="2m6.038800614s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.037384279 +0000 UTC m=+146.000194545" watchObservedRunningTime="2025-12-03 19:32:10.038800614 +0000 UTC m=+146.001610880" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.044910 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.045085 4916 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-w4s7m container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.43:5443/healthz\": dial tcp 10.217.0.43:5443: connect: connection refused" start-of-body= Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.045125 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" podUID="7ecc55b8-0cbd-4637-9479-f7f0286d0a0b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.43:5443/healthz\": dial tcp 10.217.0.43:5443: connect: connection refused" Dec 03 19:32:10 crc kubenswrapper[4916]: E1203 19:32:10.045267 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:10.545254977 +0000 UTC m=+146.508065243 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.058188 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-dsf48" event={"ID":"d83669b5-21c4-48ad-99f9-5abccbf369a3","Type":"ContainerStarted","Data":"9e89d94ac89f67e30336297ddf0b5e7479df4e060686720de803b47b97aee5e1"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.059166 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-dsf48" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.060919 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk" event={"ID":"33932edb-c741-4dac-a93a-ab01a3eae54d","Type":"ContainerStarted","Data":"b59edf2fa8163bfc1569dadb4ef2c2ce226067ce8ad06b605fd3ff402df8e10d"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.063290 4916 patch_prober.go:28] interesting pod/downloads-7954f5f757-dsf48 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.063343 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dsf48" podUID="d83669b5-21c4-48ad-99f9-5abccbf369a3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.065188 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" event={"ID":"8d00895f-3847-4601-8fe5-41f8fd32a47a","Type":"ContainerStarted","Data":"33fafdd4005b2211111039e5d519be3df36999308294f3645a2fad200b773e7b"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.065774 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:10 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:10 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:10 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.065822 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.065887 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.066716 4916 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-bg7xx container/catalog-operator 
namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" start-of-body= Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.066746 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" podUID="8d00895f-3847-4601-8fe5-41f8fd32a47a" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.066921 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" event={"ID":"ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb","Type":"ContainerStarted","Data":"b2a9cf58a35b4f29d229368de886822e9cb96c349bba1c5c1402ea98c33cfada"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.067453 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" podStartSLOduration=126.067411694 podStartE2EDuration="2m6.067411694s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.059459464 +0000 UTC m=+146.022269730" watchObservedRunningTime="2025-12-03 19:32:10.067411694 +0000 UTC m=+146.030221960" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.068150 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" event={"ID":"815db339-95a6-41d8-8572-9fab3b7c2030","Type":"ContainerStarted","Data":"054c4f40251cc55b7186948a8d8575be52c21534842097d289e9d6f40d43f885"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.079043 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" event={"ID":"75a73af8-9e9b-4ade-985b-1b4418ffc955","Type":"ContainerStarted","Data":"d4e2fae705a191c0f4e5c8099b8549a11246399e37a71497db399c9e69c6f5fa"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.111813 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" event={"ID":"e6db190a-b5a9-42da-bad1-63b1429be0a9","Type":"ContainerStarted","Data":"1dcf5e6086c7dcf0ec48fbf1a1474bb4ff9d693780f136afaf2613e5564ca770"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.138121 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-dsf48" podStartSLOduration=126.138104953 podStartE2EDuration="2m6.138104953s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.101949693 +0000 UTC m=+146.064759959" watchObservedRunningTime="2025-12-03 19:32:10.138104953 +0000 UTC m=+146.100915219" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.140264 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" event={"ID":"8369d329-fdce-4cd7-ab48-6c50b2a53fea","Type":"ContainerStarted","Data":"897f85647280117507d6f7823df79a01883ff264a7c4e5b947d91784429608dc"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.146035 4916 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:10 crc kubenswrapper[4916]: E1203 19:32:10.146710 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:10.646694229 +0000 UTC m=+146.609504495 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.167184 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5" event={"ID":"cf10b43c-dac3-462c-b4d5-66b27b895743","Type":"ContainerStarted","Data":"2fc7ce1e7f7c95548148bffecab91c0645063a3de537232213762d5541938dd7"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.181255 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-xltwc" podStartSLOduration=126.181242069 podStartE2EDuration="2m6.181242069s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.180839739 +0000 UTC m=+146.143650005" watchObservedRunningTime="2025-12-03 19:32:10.181242069 +0000 UTC m=+146.144052325" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.182272 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" event={"ID":"bc4d1fd5-c5dc-471f-9096-7cb5c8f9795c","Type":"ContainerStarted","Data":"e0b76c9103c3dfb827eb8e1118950502b0c0bf9a993b6589f08dd295a852a36b"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.182702 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" podStartSLOduration=126.182697525 podStartE2EDuration="2m6.182697525s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.144907074 +0000 UTC m=+146.107717340" watchObservedRunningTime="2025-12-03 19:32:10.182697525 +0000 UTC m=+146.145507781" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.184492 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-lcv2z" event={"ID":"28b8555a-3084-43be-9d3b-18fd0d993a0d","Type":"ContainerStarted","Data":"b20e637c1c5e5891219fd8d8ab5168d060d6c6c71c5cc4cbc8131206bbb399ee"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.185226 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.186425 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6rv69" event={"ID":"866b0506-2939-4cae-936e-a21d5040cb3f","Type":"ContainerStarted","Data":"4fbac40c946866d22e60e5f829f8a94df957bda18b045ff38e0e559654d2dbed"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.187090 4916 patch_prober.go:28] interesting pod/console-operator-58897d9998-lcv2z container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.187119 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-lcv2z" podUID="28b8555a-3084-43be-9d3b-18fd0d993a0d" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.188732 4916 generic.go:334] "Generic (PLEG): container finished" podID="15f7ce9a-d2ff-40c4-b717-5b78ae4ab388" containerID="792fba61e9f97f3122a3ab57230f09dca7f4cf361bad47df5d8c70f6e645ed15" exitCode=0 Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.188811 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-87mhb" event={"ID":"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388","Type":"ContainerDied","Data":"792fba61e9f97f3122a3ab57230f09dca7f4cf361bad47df5d8c70f6e645ed15"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.213363 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" event={"ID":"891743f6-cf2a-499d-914c-003f9a0a6875","Type":"ContainerStarted","Data":"7257aa167c09a2963639f11d3475713ffdf874e1c97a0dca7cdf96aaf0fd2a98"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.240927 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-rs2tx" podStartSLOduration=126.24091286 podStartE2EDuration="2m6.24091286s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.240308405 +0000 UTC m=+146.203118671" watchObservedRunningTime="2025-12-03 19:32:10.24091286 +0000 UTC m=+146.203723126" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.259769 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd" event={"ID":"6ecd5613-956e-4d47-beba-d572b9415562","Type":"ContainerStarted","Data":"6dfa85e2a244110738ebc20a2aef8e623a751f1958d34ecc15a0104ad26eda95"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.263396 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:10 crc kubenswrapper[4916]: E1203 19:32:10.267483 4916 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:10.767464918 +0000 UTC m=+146.730275184 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.287259 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" event={"ID":"d60ab555-3c24-40c0-917b-3bed070c6ec5","Type":"ContainerStarted","Data":"83abb024bfb8c55f0cbf0cd98bd414a3aa8f5cf5b4da16dea8f5d6dad18af260"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.322728 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" podStartSLOduration=126.322708629 podStartE2EDuration="2m6.322708629s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.296309704 +0000 UTC m=+146.259119970" watchObservedRunningTime="2025-12-03 19:32:10.322708629 +0000 UTC m=+146.285518895" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.323495 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-gkhs6" podStartSLOduration=126.323486878 podStartE2EDuration="2m6.323486878s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.320892483 +0000 UTC m=+146.283702749" watchObservedRunningTime="2025-12-03 19:32:10.323486878 +0000 UTC m=+146.286297144" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.354845 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" event={"ID":"7bad711e-046f-470f-9a18-0cbfadd7f05d","Type":"ContainerStarted","Data":"75c3b0e00c69168ea47b5c01b8d3b5dbfac024b0bf5be84e669f0102260cf878"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.354892 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" event={"ID":"7bad711e-046f-470f-9a18-0cbfadd7f05d","Type":"ContainerStarted","Data":"8374c91fb677eb994295c7a51b71b602cc608f28bb9c87565288d6351252ba3c"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.369501 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:10 crc kubenswrapper[4916]: E1203 19:32:10.374906 4916 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:10.874890032 +0000 UTC m=+146.837700298 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.390099 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" event={"ID":"80ccd9f5-c714-4ee7-9bd1-58363e2d1526","Type":"ContainerStarted","Data":"0a2a5e486e09592bc24365360c7c84b8617ce185124e37c05eceb878ffa27a0f"} Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.390147 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.390578 4916 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-w86mh container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.390629 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" podUID="ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.404264 4916 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-jjrjl container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.404307 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" podUID="80ccd9f5-c714-4ee7-9bd1-58363e2d1526" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.406003 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.440128 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-lcv2z" podStartSLOduration=126.440115763 podStartE2EDuration="2m6.440115763s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.438207535 +0000 UTC m=+146.401017801" watchObservedRunningTime="2025-12-03 19:32:10.440115763 +0000 UTC 
m=+146.402926019" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.470427 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:10 crc kubenswrapper[4916]: E1203 19:32:10.471403 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:10.97138896 +0000 UTC m=+146.934199226 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.481600 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-n59pd" podStartSLOduration=126.481579786 podStartE2EDuration="2m6.481579786s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.473078962 +0000 UTC m=+146.435889228" watchObservedRunningTime="2025-12-03 19:32:10.481579786 +0000 UTC m=+146.444390042" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.573684 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:10 crc kubenswrapper[4916]: E1203 19:32:10.578019 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:11.078006233 +0000 UTC m=+147.040816499 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.585513 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.585845 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.678155 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:10 crc kubenswrapper[4916]: E1203 19:32:10.678475 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:11.17845982 +0000 UTC m=+147.141270086 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.753504 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-tqd7x" podStartSLOduration=126.753483228 podStartE2EDuration="2m6.753483228s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.752506884 +0000 UTC m=+146.715317150" watchObservedRunningTime="2025-12-03 19:32:10.753483228 +0000 UTC m=+146.716293494" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.754124 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" podStartSLOduration=126.754116064 podStartE2EDuration="2m6.754116064s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:10.690046482 +0000 UTC m=+146.652856768" watchObservedRunningTime="2025-12-03 19:32:10.754116064 +0000 UTC m=+146.716926330" Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.783245 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:10 crc kubenswrapper[4916]: E1203 19:32:10.783797 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:11.283785071 +0000 UTC m=+147.246595337 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.889065 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:10 crc kubenswrapper[4916]: E1203 19:32:10.889473 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:11.38945668 +0000 UTC m=+147.352266946 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:10 crc kubenswrapper[4916]: I1203 19:32:10.990517 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:10 crc kubenswrapper[4916]: E1203 19:32:10.990932 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:11.490915373 +0000 UTC m=+147.453725639 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.069832 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:11 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:11 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:11 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.070210 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.091578 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:11 crc kubenswrapper[4916]: E1203 19:32:11.091757 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:11.59173128 +0000 UTC m=+147.554541546 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.091884 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:11 crc kubenswrapper[4916]: E1203 19:32:11.092258 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:11.592251673 +0000 UTC m=+147.555061939 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.301716 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:11 crc kubenswrapper[4916]: E1203 19:32:11.302092 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:11.802077833 +0000 UTC m=+147.764888099 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.403377 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:11 crc kubenswrapper[4916]: E1203 19:32:11.403738 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:11.90372587 +0000 UTC m=+147.866536136 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.413064 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" event={"ID":"e6db190a-b5a9-42da-bad1-63b1429be0a9","Type":"ContainerStarted","Data":"526e960674fcd2e1b3a7f6ec994516bbe57b2e4c7e36e28f3cc8c950500b811f"} Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.458951 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-87mhb" event={"ID":"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388","Type":"ContainerStarted","Data":"2f3f2655e18a3d7f1927e56443346ca6c9364c890adbdd7781dcf3a298fb1e07"} Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.466354 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zlg5c" podStartSLOduration=127.466324916 podStartE2EDuration="2m7.466324916s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:11.446123817 +0000 UTC m=+147.408934083" watchObservedRunningTime="2025-12-03 19:32:11.466324916 +0000 UTC m=+147.429135182" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.489060 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ckbgp" event={"ID":"630defa6-81c8-4d9f-84bf-ef45e55be900","Type":"ContainerStarted","Data":"7911dfebeefaf665af16bffce33a498195507a41bd002960a0775fc2f36487fe"} Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.489317 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-ckbgp" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.511847 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" event={"ID":"ecf7536e-fc5a-4993-b4c9-e0a560dd1cbb","Type":"ContainerStarted","Data":"078d4d5488b0cc5f9a25ad37271db30c4acf19801434e7e346ab4369969b9a03"} Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.513271 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.513837 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:11 crc kubenswrapper[4916]: E1203 19:32:11.514787 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.014771355 +0000 UTC m=+147.977581621 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.537385 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-ckbgp" podStartSLOduration=8.537367003 podStartE2EDuration="8.537367003s" podCreationTimestamp="2025-12-03 19:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:11.535061215 +0000 UTC m=+147.497871501" watchObservedRunningTime="2025-12-03 19:32:11.537367003 +0000 UTC m=+147.500177269" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.554914 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5" event={"ID":"cf10b43c-dac3-462c-b4d5-66b27b895743","Type":"ContainerStarted","Data":"8ec9cc328e69144df055e993390499ac4637f3f639921496733ce7b98968a9a6"} Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.571921 4916 generic.go:334] "Generic (PLEG): container finished" podID="d60ab555-3c24-40c0-917b-3bed070c6ec5" containerID="83abb024bfb8c55f0cbf0cd98bd414a3aa8f5cf5b4da16dea8f5d6dad18af260" exitCode=0 Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.572002 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" event={"ID":"d60ab555-3c24-40c0-917b-3bed070c6ec5","Type":"ContainerDied","Data":"83abb024bfb8c55f0cbf0cd98bd414a3aa8f5cf5b4da16dea8f5d6dad18af260"} Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.572035 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" event={"ID":"d60ab555-3c24-40c0-917b-3bed070c6ec5","Type":"ContainerStarted","Data":"811f75915405ba9381db017e19a5946d0c93252c98d36d63333b68fb1cf87a41"} Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.572673 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.615385 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:11 crc kubenswrapper[4916]: E1203 19:32:11.617147 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.117133361 +0000 UTC m=+148.079943627 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.618933 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" event={"ID":"75a73af8-9e9b-4ade-985b-1b4418ffc955","Type":"ContainerStarted","Data":"6fec1699a2e2f488d73fc708d4e5133f84e4cad23e563e6f2c2e5be8d661539f"} Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.626985 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-lk8q6" podStartSLOduration=127.626967498 podStartE2EDuration="2m7.626967498s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:11.576118258 +0000 UTC m=+147.538928524" watchObservedRunningTime="2025-12-03 19:32:11.626967498 +0000 UTC m=+147.589777764" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.656298 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9" event={"ID":"4c7cc56e-1da2-4fa4-a402-8b2d407caf39","Type":"ContainerStarted","Data":"44eb6bcf2b15cae2d03f5eaa261285d26f7ec98aa255552a236e9a5ec9f5c86a"} Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.657127 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.675041 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7" podStartSLOduration=127.675020857 podStartE2EDuration="2m7.675020857s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:11.657517177 +0000 UTC m=+147.620327453" watchObservedRunningTime="2025-12-03 19:32:11.675020857 +0000 UTC m=+147.637831123" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.679932 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk" event={"ID":"33932edb-c741-4dac-a93a-ab01a3eae54d","Type":"ContainerStarted","Data":"bce7301a793bff55d52624c52fffd010ad4a2051b4e2e8c5029afbb52db4c8ec"} Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.692157 4916 patch_prober.go:28] interesting pod/downloads-7954f5f757-dsf48 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.692215 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dsf48" podUID="d83669b5-21c4-48ad-99f9-5abccbf369a3" containerName="download-server" probeResult="failure" output="Get 
\"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.692942 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jjrjl" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.692980 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l5n96" podStartSLOduration=127.692962889 podStartE2EDuration="2m7.692962889s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:11.691266476 +0000 UTC m=+147.654076742" watchObservedRunningTime="2025-12-03 19:32:11.692962889 +0000 UTC m=+147.655773155" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.708864 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hhkb7" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.717025 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:11 crc kubenswrapper[4916]: E1203 19:32:11.728068 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.228037841 +0000 UTC m=+148.190848147 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.751598 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-n4ps5" podStartSLOduration=127.751555163 podStartE2EDuration="2m7.751555163s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:11.73714549 +0000 UTC m=+147.699955756" watchObservedRunningTime="2025-12-03 19:32:11.751555163 +0000 UTC m=+147.714365419" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.817624 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bg7xx" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.828318 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:11 crc kubenswrapper[4916]: E1203 19:32:11.829880 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.329866564 +0000 UTC m=+148.292676830 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.848842 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9" podStartSLOduration=127.848825991 podStartE2EDuration="2m7.848825991s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:11.789472387 +0000 UTC m=+147.752282653" watchObservedRunningTime="2025-12-03 19:32:11.848825991 +0000 UTC m=+147.811636257" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.925313 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qn47d" podStartSLOduration=127.925296145 podStartE2EDuration="2m7.925296145s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:11.92430622 +0000 UTC m=+147.887116486" watchObservedRunningTime="2025-12-03 19:32:11.925296145 +0000 UTC m=+147.888106411" Dec 03 19:32:11 crc kubenswrapper[4916]: I1203 19:32:11.929480 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:11 crc kubenswrapper[4916]: E1203 19:32:11.929775 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.429759047 +0000 UTC m=+148.392569313 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.006616 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-9d5sk" podStartSLOduration=128.006591451 podStartE2EDuration="2m8.006591451s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:12.001956625 +0000 UTC m=+147.964766891" watchObservedRunningTime="2025-12-03 19:32:12.006591451 +0000 UTC m=+147.969401727" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.030480 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:12 crc kubenswrapper[4916]: E1203 19:32:12.030926 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.530908573 +0000 UTC m=+148.493718839 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.069798 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:12 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:12 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:12 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.069854 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.184266 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:12 crc kubenswrapper[4916]: E1203 19:32:12.184608 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.68459254 +0000 UTC m=+148.647402806 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.184659 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:12 crc kubenswrapper[4916]: E1203 19:32:12.184957 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.684942299 +0000 UTC m=+148.647752565 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.285945 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:12 crc kubenswrapper[4916]: E1203 19:32:12.286260 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.786237738 +0000 UTC m=+148.749048044 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.286354 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:12 crc kubenswrapper[4916]: E1203 19:32:12.286647 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.786637168 +0000 UTC m=+148.749447434 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.387284 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.387532 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.387697 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:32:12 crc kubenswrapper[4916]: E1203 19:32:12.388642 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:12.888619084 +0000 UTC m=+148.851429340 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.393191 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.452981 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.494661 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.494728 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.494769 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.528528 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.555863 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:32:12 crc kubenswrapper[4916]: E1203 
19:32:12.556228 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:13.056215012 +0000 UTC m=+149.019025268 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.556697 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.557314 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.557601 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.595660 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:12 crc kubenswrapper[4916]: E1203 19:32:12.595917 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:13.09589764 +0000 UTC m=+149.058707896 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.684037 4916 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-w4s7m container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.43:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.684107 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" podUID="7ecc55b8-0cbd-4637-9479-f7f0286d0a0b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.43:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.684654 4916 patch_prober.go:28] interesting pod/console-operator-58897d9998-lcv2z container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.684729 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-lcv2z" podUID="28b8555a-3084-43be-9d3b-18fd0d993a0d" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.700372 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:12 crc kubenswrapper[4916]: E1203 19:32:12.700713 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:13.200700567 +0000 UTC m=+149.163510833 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.789190 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6rv69" event={"ID":"866b0506-2939-4cae-936e-a21d5040cb3f","Type":"ContainerStarted","Data":"5c0e83456c082a4d3aa22fbd3f4431a263ce5a9e3cded541e6b00616f141f372"} Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.811454 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:12 crc kubenswrapper[4916]: E1203 19:32:12.811738 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:13.311718841 +0000 UTC m=+149.274529107 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.875617 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-87mhb" event={"ID":"15f7ce9a-d2ff-40c4-b717-5b78ae4ab388","Type":"ContainerStarted","Data":"d77b0201ee1687133a2f9215c6de070b99e5914c00756134eb0205363974b20f"} Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.878751 4916 patch_prober.go:28] interesting pod/downloads-7954f5f757-dsf48 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.878813 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dsf48" podUID="d83669b5-21c4-48ad-99f9-5abccbf369a3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.913413 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:12 crc kubenswrapper[4916]: E1203 
19:32:12.918312 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:13.418297093 +0000 UTC m=+149.381107359 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.934716 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pvd7g"] Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.935635 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvd7g" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.942404 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.945770 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pvd7g"] Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.949135 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-lcv2z" Dec 03 19:32:12 crc kubenswrapper[4916]: I1203 19:32:12.982683 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-87mhb" podStartSLOduration=128.982665812 podStartE2EDuration="2m8.982665812s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:12.973003999 +0000 UTC m=+148.935814255" watchObservedRunningTime="2025-12-03 19:32:12.982665812 +0000 UTC m=+148.945476078" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.017744 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:13 crc kubenswrapper[4916]: E1203 19:32:13.018267 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:13.518244118 +0000 UTC m=+149.481054384 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.083725 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:13 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:13 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:13 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.083778 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.121388 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-catalog-content\") pod \"community-operators-pvd7g\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") " pod="openshift-marketplace/community-operators-pvd7g" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.121476 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-utilities\") pod \"community-operators-pvd7g\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") " pod="openshift-marketplace/community-operators-pvd7g" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.121516 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8rp4\" (UniqueName: \"kubernetes.io/projected/0551f9a0-8ac5-4b28-bf49-b507428e6b05-kube-api-access-l8rp4\") pod \"community-operators-pvd7g\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") " pod="openshift-marketplace/community-operators-pvd7g" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.121549 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:13 crc kubenswrapper[4916]: E1203 19:32:13.121913 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:13.621900286 +0000 UTC m=+149.584710552 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.223047 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.223544 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8rp4\" (UniqueName: \"kubernetes.io/projected/0551f9a0-8ac5-4b28-bf49-b507428e6b05-kube-api-access-l8rp4\") pod \"community-operators-pvd7g\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") " pod="openshift-marketplace/community-operators-pvd7g" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.223635 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-catalog-content\") pod \"community-operators-pvd7g\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") " pod="openshift-marketplace/community-operators-pvd7g" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.223689 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-utilities\") pod \"community-operators-pvd7g\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") " pod="openshift-marketplace/community-operators-pvd7g" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.224081 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-utilities\") pod \"community-operators-pvd7g\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") " pod="openshift-marketplace/community-operators-pvd7g" Dec 03 19:32:13 crc kubenswrapper[4916]: E1203 19:32:13.224398 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:13.724383285 +0000 UTC m=+149.687193541 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.224837 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-catalog-content\") pod \"community-operators-pvd7g\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") " pod="openshift-marketplace/community-operators-pvd7g" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.253683 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8rp4\" (UniqueName: \"kubernetes.io/projected/0551f9a0-8ac5-4b28-bf49-b507428e6b05-kube-api-access-l8rp4\") pod \"community-operators-pvd7g\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") " pod="openshift-marketplace/community-operators-pvd7g" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.265982 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvd7g" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.287891 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nq575"] Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.289050 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nq575" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.326629 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nq575"] Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.327342 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.327394 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-catalog-content\") pod \"community-operators-nq575\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " pod="openshift-marketplace/community-operators-nq575" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.327432 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bg2r2\" (UniqueName: \"kubernetes.io/projected/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-kube-api-access-bg2r2\") pod \"community-operators-nq575\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " pod="openshift-marketplace/community-operators-nq575" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.327473 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-utilities\") pod 
\"community-operators-nq575\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " pod="openshift-marketplace/community-operators-nq575" Dec 03 19:32:13 crc kubenswrapper[4916]: E1203 19:32:13.335142 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:13.835123801 +0000 UTC m=+149.797934067 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.383463 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-w4s7m" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.429071 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 19:32:13 crc kubenswrapper[4916]: E1203 19:32:13.429184 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:13.929165227 +0000 UTC m=+149.891975493 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.429762 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.429797 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-catalog-content\") pod \"community-operators-nq575\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " pod="openshift-marketplace/community-operators-nq575" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.429833 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bg2r2\" (UniqueName: \"kubernetes.io/projected/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-kube-api-access-bg2r2\") pod \"community-operators-nq575\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " pod="openshift-marketplace/community-operators-nq575" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.429869 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-utilities\") pod \"community-operators-nq575\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " pod="openshift-marketplace/community-operators-nq575" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.430325 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-utilities\") pod \"community-operators-nq575\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " pod="openshift-marketplace/community-operators-nq575" Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.436025 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-catalog-content\") pod \"community-operators-nq575\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " pod="openshift-marketplace/community-operators-nq575" Dec 03 19:32:13 crc kubenswrapper[4916]: E1203 19:32:13.436054 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:13.936031689 +0000 UTC m=+149.898841955 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.517392 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bg2r2\" (UniqueName: \"kubernetes.io/projected/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-kube-api-access-bg2r2\") pod \"community-operators-nq575\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " pod="openshift-marketplace/community-operators-nq575"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.518684 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-r4lq2"]
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.530827 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:13 crc kubenswrapper[4916]: E1203 19:32:13.531161 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:14.031143633 +0000 UTC m=+149.993953899 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.539065 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.548592 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.552530 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r4lq2"]
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.634701 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:13 crc kubenswrapper[4916]: E1203 19:32:13.635123 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:14.135107309 +0000 UTC m=+150.097917575 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.687846 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nq575"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.693636 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s7w54"]
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.694772 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.716038 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s7w54"]
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.758458 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.759018 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcfsh\" (UniqueName: \"kubernetes.io/projected/0d597107-0497-411e-8b94-fa47d1ddd065-kube-api-access-gcfsh\") pod \"certified-operators-s7w54\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.759071 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-utilities\") pod \"certified-operators-s7w54\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.759094 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-catalog-content\") pod \"certified-operators-s7w54\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.759124 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-utilities\") pod \"certified-operators-r4lq2\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") " pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.759144 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-catalog-content\") pod \"certified-operators-r4lq2\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") " pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.759175 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ptx2\" (UniqueName: \"kubernetes.io/projected/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-kube-api-access-5ptx2\") pod \"certified-operators-r4lq2\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") " pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:32:13 crc kubenswrapper[4916]: E1203 19:32:13.759392 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:14.259373076 +0000 UTC m=+150.222183342 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.865319 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-catalog-content\") pod \"certified-operators-s7w54\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.865374 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-utilities\") pod \"certified-operators-r4lq2\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") " pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.865397 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-catalog-content\") pod \"certified-operators-r4lq2\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") " pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.865426 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ptx2\" (UniqueName: \"kubernetes.io/projected/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-kube-api-access-5ptx2\") pod \"certified-operators-r4lq2\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") " pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.865508 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.865542 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcfsh\" (UniqueName: \"kubernetes.io/projected/0d597107-0497-411e-8b94-fa47d1ddd065-kube-api-access-gcfsh\") pod \"certified-operators-s7w54\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.865601 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-utilities\") pod \"certified-operators-s7w54\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.866073 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-utilities\") pod \"certified-operators-s7w54\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.866345 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-catalog-content\") pod \"certified-operators-s7w54\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.866738 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-utilities\") pod \"certified-operators-r4lq2\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") " pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:32:13 crc kubenswrapper[4916]: E1203 19:32:13.867214 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:14.367192659 +0000 UTC m=+150.330002995 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.867376 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-catalog-content\") pod \"certified-operators-r4lq2\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") " pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.877691 4916 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.899383 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcfsh\" (UniqueName: \"kubernetes.io/projected/0d597107-0497-411e-8b94-fa47d1ddd065-kube-api-access-gcfsh\") pod \"certified-operators-s7w54\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.926912 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ptx2\" (UniqueName: \"kubernetes.io/projected/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-kube-api-access-5ptx2\") pod \"certified-operators-r4lq2\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") " pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.939820 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.965436 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6rv69" event={"ID":"866b0506-2939-4cae-936e-a21d5040cb3f","Type":"ContainerStarted","Data":"ec89a9b7c6ad8b254aebf0e867b2a19b972916ae70458d3c1f3fd63c7b43144c"}
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.966124 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:13 crc kubenswrapper[4916]: E1203 19:32:13.966388 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:14.466373615 +0000 UTC m=+150.429183881 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:13 crc kubenswrapper[4916]: I1203 19:32:13.992541 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"8805253174d61f17e580ac943efefebeaa6fe234a9493958891a113ae555e5cf"}
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.026180 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rvjw7"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.068003 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:14 crc kubenswrapper[4916]: E1203 19:32:14.080955 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:14.580939498 +0000 UTC m=+150.543749754 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.082068 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.099362 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.100001 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 19:32:14 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld
Dec 03 19:32:14 crc kubenswrapper[4916]: [+]process-running ok
Dec 03 19:32:14 crc kubenswrapper[4916]: healthz check failed
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.100052 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.100365 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.100492 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.107257 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.107504 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.168864 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.169025 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e6849e2-9e49-4ca7-a868-66d7835562e2-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5e6849e2-9e49-4ca7-a868-66d7835562e2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.169052 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e6849e2-9e49-4ca7-a868-66d7835562e2-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5e6849e2-9e49-4ca7-a868-66d7835562e2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 03 19:32:14 crc kubenswrapper[4916]: E1203 19:32:14.169203 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:14.669187808 +0000 UTC m=+150.631998074 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.270289 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e6849e2-9e49-4ca7-a868-66d7835562e2-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5e6849e2-9e49-4ca7-a868-66d7835562e2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.270622 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.270701 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e6849e2-9e49-4ca7-a868-66d7835562e2-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5e6849e2-9e49-4ca7-a868-66d7835562e2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.270769 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e6849e2-9e49-4ca7-a868-66d7835562e2-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5e6849e2-9e49-4ca7-a868-66d7835562e2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 03 19:32:14 crc kubenswrapper[4916]: E1203 19:32:14.271281 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:14.771269617 +0000 UTC m=+150.734079883 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.314979 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e6849e2-9e49-4ca7-a868-66d7835562e2-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5e6849e2-9e49-4ca7-a868-66d7835562e2\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.359389 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nq575"]
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.375094 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:14 crc kubenswrapper[4916]: E1203 19:32:14.375463 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:14.875448548 +0000 UTC m=+150.838258814 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.378452 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pvd7g"]
Dec 03 19:32:14 crc kubenswrapper[4916]: W1203 19:32:14.449076 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0551f9a0_8ac5_4b28_bf49_b507428e6b05.slice/crio-dcb9c011eac2ff4f5753423e044753f4f2f2e075e39378f6fed7615a74d3fb27 WatchSource:0}: Error finding container dcb9c011eac2ff4f5753423e044753f4f2f2e075e39378f6fed7615a74d3fb27: Status 404 returned error can't find the container with id dcb9c011eac2ff4f5753423e044753f4f2f2e075e39378f6fed7615a74d3fb27
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.454385 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.478176 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:14 crc kubenswrapper[4916]: E1203 19:32:14.478524 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:14.978512382 +0000 UTC m=+150.941322648 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.592274 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:14 crc kubenswrapper[4916]: E1203 19:32:14.592661 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 19:32:15.092643664 +0000 UTC m=+151.055453940 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.687770 4916 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-03T19:32:13.877712334Z","Handler":null,"Name":""}
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.693149 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:14 crc kubenswrapper[4916]: E1203 19:32:14.693477 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 19:32:15.193464981 +0000 UTC m=+151.156275247 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwgl6" (UID: "151b79c1-f797-460a-9883-5af28efabd61") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.694722 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s7w54"]
Dec 03 19:32:14 crc kubenswrapper[4916]: W1203 19:32:14.734484 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d597107_0497_411e_8b94_fa47d1ddd065.slice/crio-1b8ade59172824c1e3e6db57553bda2784c8224d3ea43c4cd16de060a12fc277 WatchSource:0}: Error finding container 1b8ade59172824c1e3e6db57553bda2784c8224d3ea43c4cd16de060a12fc277: Status 404 returned error can't find the container with id 1b8ade59172824c1e3e6db57553bda2784c8224d3ea43c4cd16de060a12fc277
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.734616 4916 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.734646 4916 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.781831 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r4lq2"]
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.794624 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.850718 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.896850 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.905714 4916 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.905757 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:14 crc kubenswrapper[4916]: W1203 19:32:14.918146 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83b8e59d_6c7f_4ee7_836b_ae7c6ca444fe.slice/crio-1e030ea98f92cf6e44712d1f4398afd0757835cd341c491d8a14a13afb7d9c55 WatchSource:0}: Error finding container 1e030ea98f92cf6e44712d1f4398afd0757835cd341c491d8a14a13afb7d9c55: Status 404 returned error can't find the container with id 1e030ea98f92cf6e44712d1f4398afd0757835cd341c491d8a14a13afb7d9c55
Dec 03 19:32:14 crc kubenswrapper[4916]: I1203 19:32:14.976989 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.000187 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c6e560e49ecb0258b654de189dda11a2a4ac84a3a0da52e8b7b7038edff97ee3"}
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.000245 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"884a5753495426d4afdc80556e00a0b6b1dfc905c857affc56de22f1ef160347"}
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.011494 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvd7g" event={"ID":"0551f9a0-8ac5-4b28-bf49-b507428e6b05","Type":"ContainerStarted","Data":"dcb9c011eac2ff4f5753423e044753f4f2f2e075e39378f6fed7615a74d3fb27"}
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.012721 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7w54" event={"ID":"0d597107-0497-411e-8b94-fa47d1ddd065","Type":"ContainerStarted","Data":"1b8ade59172824c1e3e6db57553bda2784c8224d3ea43c4cd16de060a12fc277"}
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.016077 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nq575" event={"ID":"d46fd03d-fe64-42cf-9e43-aab1f8c7519f","Type":"ContainerStarted","Data":"8d12fd0bbae829b83704c63978341ed86959cc635c39e64ae9160776d6ba980d"}
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.029044 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwgl6\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.029794 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"b54a77ceb88886b569a1d967cb76bb49a9fc6bf122799b9c2282f441e3795c99"}
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.031075 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"efb9df29e48f9694557f1699dc68cc34930fb6270e9b8f68b1e40320a8d0128b"}
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.031301 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.054352 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.055727 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-6rv69" event={"ID":"866b0506-2939-4cae-936e-a21d5040cb3f","Type":"ContainerStarted","Data":"071e9bb9d09bb36577a35c81362e4156373a814656190a02e225654da49cbb40"}
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.062749 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4lq2" event={"ID":"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe","Type":"ContainerStarted","Data":"1e030ea98f92cf6e44712d1f4398afd0757835cd341c491d8a14a13afb7d9c55"}
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.068291 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 19:32:15 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld
Dec 03 19:32:15 crc kubenswrapper[4916]: [+]process-running ok
Dec 03 19:32:15 crc kubenswrapper[4916]: healthz check failed
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.068366 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.070456 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"257af95c373dbd98585ed08a99f5343469f7d219d6663067dfd9147cfc34a0a0"}
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.124785 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-6rv69" podStartSLOduration=12.124766414 podStartE2EDuration="12.124766414s" podCreationTimestamp="2025-12-03 19:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:15.097731943 +0000 UTC m=+151.060542229" watchObservedRunningTime="2025-12-03 19:32:15.124766414 +0000 UTC m=+151.087576680"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.433605 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwgl6"]
Dec 03 19:32:15 crc kubenswrapper[4916]: W1203 19:32:15.444909 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod151b79c1_f797_460a_9883_5af28efabd61.slice/crio-d6fb74c385eaacb67dfddc57c9a9f16b40590ecde16360bf16f03cbc20d8df68 WatchSource:0}: Error finding container d6fb74c385eaacb67dfddc57c9a9f16b40590ecde16360bf16f03cbc20d8df68: Status 404 returned error can't find the container with id d6fb74c385eaacb67dfddc57c9a9f16b40590ecde16360bf16f03cbc20d8df68
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.481897 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h8kd6"]
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.483169 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.486001 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.500664 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h8kd6"]
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.507627 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-utilities\") pod \"redhat-marketplace-h8kd6\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") " pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.507666 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-catalog-content\") pod \"redhat-marketplace-h8kd6\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") " pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.507683 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mshqj\" (UniqueName: \"kubernetes.io/projected/ac59f68d-2f62-419c-9244-ea9ee95242f8-kube-api-access-mshqj\") pod \"redhat-marketplace-h8kd6\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") " pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.610219 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-catalog-content\") pod \"redhat-marketplace-h8kd6\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") " pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.610493 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mshqj\" (UniqueName: \"kubernetes.io/projected/ac59f68d-2f62-419c-9244-ea9ee95242f8-kube-api-access-mshqj\") pod \"redhat-marketplace-h8kd6\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") " pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.610674 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-utilities\") pod \"redhat-marketplace-h8kd6\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") " pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.610966 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-catalog-content\") pod \"redhat-marketplace-h8kd6\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") " pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.611309 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-utilities\") pod \"redhat-marketplace-h8kd6\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") " pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.650900 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-cdztl"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.651207 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-cdztl"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.651934 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mshqj\" (UniqueName: \"kubernetes.io/projected/ac59f68d-2f62-419c-9244-ea9ee95242f8-kube-api-access-mshqj\") pod \"redhat-marketplace-h8kd6\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") " pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.654132 4916 patch_prober.go:28] interesting pod/console-f9d7485db-cdztl container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body=
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.654172 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-cdztl" podUID="1d18bd86-a58f-451c-90c0-9fa9834c6d77" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.670004 4916 patch_prober.go:28] interesting pod/downloads-7954f5f757-dsf48 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.670062 4916 patch_prober.go:28] interesting pod/downloads-7954f5f757-dsf48 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.670117 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dsf48" podUID="d83669b5-21c4-48ad-99f9-5abccbf369a3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.670069 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-dsf48" podUID="d83669b5-21c4-48ad-99f9-5abccbf369a3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.818913 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.881847 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n7fbp"]
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.905493 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.909828 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n7fbp"]
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.914334 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-utilities\") pod \"redhat-marketplace-n7fbp\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") " pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.914387 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzfhf\" (UniqueName: \"kubernetes.io/projected/9f268348-698e-4764-8eb6-3ce43fdc4f9b-kube-api-access-nzfhf\") pod \"redhat-marketplace-n7fbp\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") " pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.914404 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-catalog-content\") pod \"redhat-marketplace-n7fbp\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") " pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:32:15 crc kubenswrapper[4916]: I1203 19:32:15.963925 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.018200 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-utilities\") pod \"redhat-marketplace-n7fbp\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") " pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.018263 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzfhf\" (UniqueName: \"kubernetes.io/projected/9f268348-698e-4764-8eb6-3ce43fdc4f9b-kube-api-access-nzfhf\") pod \"redhat-marketplace-n7fbp\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") " pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.018283 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-catalog-content\") pod \"redhat-marketplace-n7fbp\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") " pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.018881 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-utilities\") pod \"redhat-marketplace-n7fbp\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") " pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.021934 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-catalog-content\") pod \"redhat-marketplace-n7fbp\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") " pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.040257 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzfhf\" (UniqueName: \"kubernetes.io/projected/9f268348-698e-4764-8eb6-3ce43fdc4f9b-kube-api-access-nzfhf\") pod \"redhat-marketplace-n7fbp\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") " pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.063908 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-d2kts"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.068827 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 19:32:16 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]process-running ok
Dec 03 19:32:16 crc kubenswrapper[4916]: healthz check failed
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.068875 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.074288 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h8kd6"]
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.078186 4916 generic.go:334] "Generic (PLEG): container finished" podID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" containerID="4ffd340fd79cf6c8c80a5f54828ec9cb3dc43d72877f2e79f6530a4c92c28644" exitCode=0
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.078289 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nq575" event={"ID":"d46fd03d-fe64-42cf-9e43-aab1f8c7519f","Type":"ContainerDied","Data":"4ffd340fd79cf6c8c80a5f54828ec9cb3dc43d72877f2e79f6530a4c92c28644"}
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.079410 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.081712 4916 generic.go:334] "Generic (PLEG): container finished" podID="5e6849e2-9e49-4ca7-a868-66d7835562e2" containerID="a916d485a197efde7f0888865527c7d82f1eef816a6f162265224ad650d04549" exitCode=0
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.081758 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5e6849e2-9e49-4ca7-a868-66d7835562e2","Type":"ContainerDied","Data":"a916d485a197efde7f0888865527c7d82f1eef816a6f162265224ad650d04549"}
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.081779 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5e6849e2-9e49-4ca7-a868-66d7835562e2","Type":"ContainerStarted","Data":"87eaa6c03ee32506bad4f68baa614b240e24581a807961e99088f8ef0aa8da86"}
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.082711 4916 generic.go:334] "Generic (PLEG): container finished" podID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" containerID="acc63faaaeabde4a8abd03def0004f85ad5b0cf1b7edf047b0ac6328643cc1c1" exitCode=0
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.082747 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4lq2" event={"ID":"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe","Type":"ContainerDied","Data":"acc63faaaeabde4a8abd03def0004f85ad5b0cf1b7edf047b0ac6328643cc1c1"}
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.085448 4916 generic.go:334] "Generic (PLEG): container finished" podID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" containerID="7cdeaabd72c78450e053c4a902ee80dcc6fbd3b900d8a795f1cb5f84426fe646" exitCode=0
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.085486 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvd7g" event={"ID":"0551f9a0-8ac5-4b28-bf49-b507428e6b05","Type":"ContainerDied","Data":"7cdeaabd72c78450e053c4a902ee80dcc6fbd3b900d8a795f1cb5f84426fe646"}
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.091321 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" event={"ID":"151b79c1-f797-460a-9883-5af28efabd61","Type":"ContainerStarted","Data":"1e2d71c0197031482325678860ea120bc2517ee8f33bca7f93022ba00eddf1d3"}
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.091358 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" event={"ID":"151b79c1-f797-460a-9883-5af28efabd61","Type":"ContainerStarted","Data":"d6fb74c385eaacb67dfddc57c9a9f16b40590ecde16360bf16f03cbc20d8df68"}
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.092403 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.111393 4916 generic.go:334] "Generic (PLEG): container finished" podID="4ce6a756-7c72-45f6-abb8-96d9597b7429" containerID="e7e03eda1780dc42fa5b5def994d10f4439d824dedf641dedd995fc87813df52" exitCode=0
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.111488 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" event={"ID":"4ce6a756-7c72-45f6-abb8-96d9597b7429","Type":"ContainerDied","Data":"e7e03eda1780dc42fa5b5def994d10f4439d824dedf641dedd995fc87813df52"}
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.119490 4916 generic.go:334] "Generic (PLEG): container finished" podID="0d597107-0497-411e-8b94-fa47d1ddd065" containerID="4117dd97c2983eb82524d61a3b3c8d70f7b6bf603d679b3bc44818b942db41d8" exitCode=0
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.120360 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7w54" event={"ID":"0d597107-0497-411e-8b94-fa47d1ddd065","Type":"ContainerDied","Data":"4117dd97c2983eb82524d61a3b3c8d70f7b6bf603d679b3bc44818b942db41d8"}
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.158718 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" podStartSLOduration=132.158700601 podStartE2EDuration="2m12.158700601s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:16.132889612 +0000 UTC m=+152.095699878" watchObservedRunningTime="2025-12-03 19:32:16.158700601 +0000 UTC m=+152.121510867"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.159096 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.159177 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.160439 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-87mhb"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.160468 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-87mhb"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.180687 4916 patch_prober.go:28] interesting pod/apiserver-76f77b778f-87mhb container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]log ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]etcd ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]poststarthook/start-apiserver-admission-initializer ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]poststarthook/generic-apiserver-start-informers ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]poststarthook/max-in-flight-filter ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]poststarthook/storage-object-count-tracker-hook ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Dec 03 19:32:16 crc kubenswrapper[4916]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]poststarthook/project.openshift.io-projectcache ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]poststarthook/openshift.io-startinformers ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]poststarthook/openshift.io-restmapperupdater ok
Dec 03 19:32:16 crc kubenswrapper[4916]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Dec 03 19:32:16 crc kubenswrapper[4916]: livez check failed
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.180752 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-87mhb" podUID="15f7ce9a-d2ff-40c4-b717-5b78ae4ab388" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.232400 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.296147 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n2hmt"]
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.297595 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.302910 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.306247 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n2hmt"]
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.429841 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rstx6\" (UniqueName: \"kubernetes.io/projected/4e7df06a-1d77-40e3-916c-581b46b747eb-kube-api-access-rstx6\") pod \"redhat-operators-n2hmt\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") " pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.430231 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-catalog-content\") pod \"redhat-operators-n2hmt\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") " pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.430266 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-utilities\") pod \"redhat-operators-n2hmt\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") " pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.509467 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.510152 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-k6d96"]
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.511368 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.532146 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rstx6\" (UniqueName: \"kubernetes.io/projected/4e7df06a-1d77-40e3-916c-581b46b747eb-kube-api-access-rstx6\") pod \"redhat-operators-n2hmt\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") " pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.532194 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-catalog-content\") pod \"redhat-operators-n2hmt\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") " pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.532215 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-utilities\") pod \"redhat-operators-n2hmt\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") " pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.532753 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k6d96"]
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.533012 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-utilities\") pod \"redhat-operators-n2hmt\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") " pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.533185 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-catalog-content\") pod \"redhat-operators-n2hmt\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") " pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.588597 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rstx6\" (UniqueName: \"kubernetes.io/projected/4e7df06a-1d77-40e3-916c-581b46b747eb-kube-api-access-rstx6\") pod \"redhat-operators-n2hmt\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") " pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.624389 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.633908 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxzlf\" (UniqueName: \"kubernetes.io/projected/d4896794-1b4d-4663-aa0b-18c59d5c8486-kube-api-access-mxzlf\") pod \"redhat-operators-k6d96\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.633975 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-utilities\") pod \"redhat-operators-k6d96\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.634011 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-catalog-content\") pod \"redhat-operators-k6d96\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.668500 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n7fbp"]
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.735436 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxzlf\" (UniqueName: \"kubernetes.io/projected/d4896794-1b4d-4663-aa0b-18c59d5c8486-kube-api-access-mxzlf\") pod \"redhat-operators-k6d96\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.735495 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-utilities\") pod \"redhat-operators-k6d96\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.735528 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-catalog-content\") pod \"redhat-operators-k6d96\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.736047 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-catalog-content\") pod \"redhat-operators-k6d96\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.736196 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-utilities\") pod \"redhat-operators-k6d96\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.761189 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume
\"kube-api-access-mxzlf\" (UniqueName: \"kubernetes.io/projected/d4896794-1b4d-4663-aa0b-18c59d5c8486-kube-api-access-mxzlf\") pod \"redhat-operators-k6d96\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " pod="openshift-marketplace/redhat-operators-k6d96" Dec 03 19:32:16 crc kubenswrapper[4916]: I1203 19:32:16.889809 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k6d96" Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.065908 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:17 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:17 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:17 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.066222 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.090514 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n2hmt"] Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.140921 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h8kd6" event={"ID":"ac59f68d-2f62-419c-9244-ea9ee95242f8","Type":"ContainerDied","Data":"0062423d42bac3e43599598c3863e500a7d9c40be5a512428982869a20d48004"} Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.141008 4916 generic.go:334] "Generic (PLEG): container finished" podID="ac59f68d-2f62-419c-9244-ea9ee95242f8" containerID="0062423d42bac3e43599598c3863e500a7d9c40be5a512428982869a20d48004" exitCode=0 Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.141063 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h8kd6" event={"ID":"ac59f68d-2f62-419c-9244-ea9ee95242f8","Type":"ContainerStarted","Data":"32bb40770fc2c133476353d68167708995c682da63c540a09aa941bcc9d83460"} Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.143002 4916 generic.go:334] "Generic (PLEG): container finished" podID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" containerID="53770bb90872df3d98f3fb6da38df266f7a2eaa6b1270a9c78edd889db613a99" exitCode=0 Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.143545 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7fbp" event={"ID":"9f268348-698e-4764-8eb6-3ce43fdc4f9b","Type":"ContainerDied","Data":"53770bb90872df3d98f3fb6da38df266f7a2eaa6b1270a9c78edd889db613a99"} Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.143586 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7fbp" event={"ID":"9f268348-698e-4764-8eb6-3ce43fdc4f9b","Type":"ContainerStarted","Data":"329616fc384b0090a23bc2ae47f2b733131327c763f394430a67cab694eac89c"} Dec 03 19:32:17 crc kubenswrapper[4916]: W1203 19:32:17.179242 4916 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e7df06a_1d77_40e3_916c_581b46b747eb.slice/crio-03425f39f8e308665b4fc62a5d9e5e2f52ff7f14181843c2ac095d3a09d9d002 WatchSource:0}: Error finding container 03425f39f8e308665b4fc62a5d9e5e2f52ff7f14181843c2ac095d3a09d9d002: Status 404 returned error can't find the container with id 03425f39f8e308665b4fc62a5d9e5e2f52ff7f14181843c2ac095d3a09d9d002 Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.753650 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.755080 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.940257 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e6849e2-9e49-4ca7-a868-66d7835562e2-kube-api-access\") pod \"5e6849e2-9e49-4ca7-a868-66d7835562e2\" (UID: \"5e6849e2-9e49-4ca7-a868-66d7835562e2\") " Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.941390 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ce6a756-7c72-45f6-abb8-96d9597b7429-config-volume\") pod \"4ce6a756-7c72-45f6-abb8-96d9597b7429\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.941546 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqxpm\" (UniqueName: \"kubernetes.io/projected/4ce6a756-7c72-45f6-abb8-96d9597b7429-kube-api-access-vqxpm\") pod \"4ce6a756-7c72-45f6-abb8-96d9597b7429\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.941878 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e6849e2-9e49-4ca7-a868-66d7835562e2-kubelet-dir\") pod \"5e6849e2-9e49-4ca7-a868-66d7835562e2\" (UID: \"5e6849e2-9e49-4ca7-a868-66d7835562e2\") " Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.942190 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ce6a756-7c72-45f6-abb8-96d9597b7429-secret-volume\") pod \"4ce6a756-7c72-45f6-abb8-96d9597b7429\" (UID: \"4ce6a756-7c72-45f6-abb8-96d9597b7429\") " Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.942495 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ce6a756-7c72-45f6-abb8-96d9597b7429-config-volume" (OuterVolumeSpecName: "config-volume") pod "4ce6a756-7c72-45f6-abb8-96d9597b7429" (UID: "4ce6a756-7c72-45f6-abb8-96d9597b7429"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.942794 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e6849e2-9e49-4ca7-a868-66d7835562e2-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5e6849e2-9e49-4ca7-a868-66d7835562e2" (UID: "5e6849e2-9e49-4ca7-a868-66d7835562e2"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.942858 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e6849e2-9e49-4ca7-a868-66d7835562e2-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5e6849e2-9e49-4ca7-a868-66d7835562e2" (UID: "5e6849e2-9e49-4ca7-a868-66d7835562e2"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.944056 4916 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ce6a756-7c72-45f6-abb8-96d9597b7429-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.944075 4916 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e6849e2-9e49-4ca7-a868-66d7835562e2-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.944122 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e6849e2-9e49-4ca7-a868-66d7835562e2-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.947213 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ce6a756-7c72-45f6-abb8-96d9597b7429-kube-api-access-vqxpm" (OuterVolumeSpecName: "kube-api-access-vqxpm") pod "4ce6a756-7c72-45f6-abb8-96d9597b7429" (UID: "4ce6a756-7c72-45f6-abb8-96d9597b7429"). InnerVolumeSpecName "kube-api-access-vqxpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.953871 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k6d96"] Dec 03 19:32:17 crc kubenswrapper[4916]: I1203 19:32:17.995433 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ce6a756-7c72-45f6-abb8-96d9597b7429-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4ce6a756-7c72-45f6-abb8-96d9597b7429" (UID: "4ce6a756-7c72-45f6-abb8-96d9597b7429"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.045391 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqxpm\" (UniqueName: \"kubernetes.io/projected/4ce6a756-7c72-45f6-abb8-96d9597b7429-kube-api-access-vqxpm\") on node \"crc\" DevicePath \"\"" Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.045439 4916 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ce6a756-7c72-45f6-abb8-96d9597b7429-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.066964 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:18 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:18 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:18 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.067020 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.183412 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.183412 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8" event={"ID":"4ce6a756-7c72-45f6-abb8-96d9597b7429","Type":"ContainerDied","Data":"f55b89a70d1d826e7bb91d872d069afbbf9d6fc01608ddc9284a02c65d070ec4"} Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.183828 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f55b89a70d1d826e7bb91d872d069afbbf9d6fc01608ddc9284a02c65d070ec4" Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.189019 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5e6849e2-9e49-4ca7-a868-66d7835562e2","Type":"ContainerDied","Data":"87eaa6c03ee32506bad4f68baa614b240e24581a807961e99088f8ef0aa8da86"} Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.189071 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87eaa6c03ee32506bad4f68baa614b240e24581a807961e99088f8ef0aa8da86" Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.189031 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.192645 4916 generic.go:334] "Generic (PLEG): container finished" podID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerID="4248ddf150c4aa6c2dfba229d45580d48245292c249b26e2868955912c345251" exitCode=0 Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.192706 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n2hmt" event={"ID":"4e7df06a-1d77-40e3-916c-581b46b747eb","Type":"ContainerDied","Data":"4248ddf150c4aa6c2dfba229d45580d48245292c249b26e2868955912c345251"} Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.192730 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n2hmt" event={"ID":"4e7df06a-1d77-40e3-916c-581b46b747eb","Type":"ContainerStarted","Data":"03425f39f8e308665b4fc62a5d9e5e2f52ff7f14181843c2ac095d3a09d9d002"} Dec 03 19:32:18 crc kubenswrapper[4916]: I1203 19:32:18.207815 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6d96" event={"ID":"d4896794-1b4d-4663-aa0b-18c59d5c8486","Type":"ContainerStarted","Data":"ad119045671f3778689c7d65be38934ed6ffbccf52e53d50726b3ebaa62e5626"} Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.067552 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:19 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:19 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:19 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.067630 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.150197 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 19:32:19 crc kubenswrapper[4916]: E1203 19:32:19.150605 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e6849e2-9e49-4ca7-a868-66d7835562e2" containerName="pruner" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.150617 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e6849e2-9e49-4ca7-a868-66d7835562e2" containerName="pruner" Dec 03 19:32:19 crc kubenswrapper[4916]: E1203 19:32:19.150642 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ce6a756-7c72-45f6-abb8-96d9597b7429" containerName="collect-profiles" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.150648 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ce6a756-7c72-45f6-abb8-96d9597b7429" containerName="collect-profiles" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.151265 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e6849e2-9e49-4ca7-a868-66d7835562e2" containerName="pruner" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.151291 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ce6a756-7c72-45f6-abb8-96d9597b7429" containerName="collect-profiles" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.151735 4916 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.154374 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.154487 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.169378 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.214576 4916 generic.go:334] "Generic (PLEG): container finished" podID="d4896794-1b4d-4663-aa0b-18c59d5c8486" containerID="12e0d5616024f3b2947ed313a06868e79ef7892b77417c6d9e325a50cd1f76de" exitCode=0 Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.214613 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6d96" event={"ID":"d4896794-1b4d-4663-aa0b-18c59d5c8486","Type":"ContainerDied","Data":"12e0d5616024f3b2947ed313a06868e79ef7892b77417c6d9e325a50cd1f76de"} Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.275668 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"8c604532-8dc1-4001-9c9f-ca1fe9f5172b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.275764 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8c604532-8dc1-4001-9c9f-ca1fe9f5172b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.377219 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"8c604532-8dc1-4001-9c9f-ca1fe9f5172b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.377301 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8c604532-8dc1-4001-9c9f-ca1fe9f5172b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.377974 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8c604532-8dc1-4001-9c9f-ca1fe9f5172b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.409017 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"8c604532-8dc1-4001-9c9f-ca1fe9f5172b\") " 
pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.483123 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 19:32:19 crc kubenswrapper[4916]: I1203 19:32:19.975056 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 19:32:20 crc kubenswrapper[4916]: W1203 19:32:20.009606 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod8c604532_8dc1_4001_9c9f_ca1fe9f5172b.slice/crio-842f5d40fbbf428d7e6ef6967b83c0f1341921738c69dbb21265ad0bc345da48 WatchSource:0}: Error finding container 842f5d40fbbf428d7e6ef6967b83c0f1341921738c69dbb21265ad0bc345da48: Status 404 returned error can't find the container with id 842f5d40fbbf428d7e6ef6967b83c0f1341921738c69dbb21265ad0bc345da48 Dec 03 19:32:20 crc kubenswrapper[4916]: I1203 19:32:20.066487 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:20 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:20 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:20 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:20 crc kubenswrapper[4916]: I1203 19:32:20.066986 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:20 crc kubenswrapper[4916]: I1203 19:32:20.223267 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8c604532-8dc1-4001-9c9f-ca1fe9f5172b","Type":"ContainerStarted","Data":"842f5d40fbbf428d7e6ef6967b83c0f1341921738c69dbb21265ad0bc345da48"} Dec 03 19:32:20 crc kubenswrapper[4916]: I1203 19:32:20.802354 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-ckbgp" Dec 03 19:32:21 crc kubenswrapper[4916]: I1203 19:32:21.064556 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:21 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:21 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:21 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:21 crc kubenswrapper[4916]: I1203 19:32:21.064626 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:21 crc kubenswrapper[4916]: I1203 19:32:21.165605 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:21 crc kubenswrapper[4916]: I1203 19:32:21.169979 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-87mhb" Dec 03 19:32:22 crc kubenswrapper[4916]: I1203 19:32:22.066379 4916 patch_prober.go:28] 
interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:22 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:22 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:22 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:22 crc kubenswrapper[4916]: I1203 19:32:22.066677 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:22 crc kubenswrapper[4916]: I1203 19:32:22.330933 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8c604532-8dc1-4001-9c9f-ca1fe9f5172b","Type":"ContainerStarted","Data":"c53697501e441150ea238bb69e728ec0bbaf192b40085b39128024510a70e68b"} Dec 03 19:32:22 crc kubenswrapper[4916]: I1203 19:32:22.363857 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.363829621 podStartE2EDuration="3.363829621s" podCreationTimestamp="2025-12-03 19:32:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:32:22.357972374 +0000 UTC m=+158.320782640" watchObservedRunningTime="2025-12-03 19:32:22.363829621 +0000 UTC m=+158.326639887" Dec 03 19:32:23 crc kubenswrapper[4916]: I1203 19:32:23.068774 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:23 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:23 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:23 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:23 crc kubenswrapper[4916]: I1203 19:32:23.068866 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:23 crc kubenswrapper[4916]: I1203 19:32:23.349554 4916 generic.go:334] "Generic (PLEG): container finished" podID="8c604532-8dc1-4001-9c9f-ca1fe9f5172b" containerID="c53697501e441150ea238bb69e728ec0bbaf192b40085b39128024510a70e68b" exitCode=0 Dec 03 19:32:23 crc kubenswrapper[4916]: I1203 19:32:23.349625 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8c604532-8dc1-4001-9c9f-ca1fe9f5172b","Type":"ContainerDied","Data":"c53697501e441150ea238bb69e728ec0bbaf192b40085b39128024510a70e68b"} Dec 03 19:32:24 crc kubenswrapper[4916]: I1203 19:32:24.069896 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:24 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:24 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:24 crc kubenswrapper[4916]: healthz 
check failed Dec 03 19:32:24 crc kubenswrapper[4916]: I1203 19:32:24.069984 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:25 crc kubenswrapper[4916]: I1203 19:32:25.070103 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:25 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:25 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:25 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:25 crc kubenswrapper[4916]: I1203 19:32:25.070178 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:25 crc kubenswrapper[4916]: I1203 19:32:25.641363 4916 patch_prober.go:28] interesting pod/console-f9d7485db-cdztl container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Dec 03 19:32:25 crc kubenswrapper[4916]: I1203 19:32:25.641441 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-cdztl" podUID="1d18bd86-a58f-451c-90c0-9fa9834c6d77" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Dec 03 19:32:25 crc kubenswrapper[4916]: I1203 19:32:25.669825 4916 patch_prober.go:28] interesting pod/downloads-7954f5f757-dsf48 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 03 19:32:25 crc kubenswrapper[4916]: I1203 19:32:25.669883 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-dsf48" podUID="d83669b5-21c4-48ad-99f9-5abccbf369a3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 03 19:32:25 crc kubenswrapper[4916]: I1203 19:32:25.670322 4916 patch_prober.go:28] interesting pod/downloads-7954f5f757-dsf48 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 03 19:32:25 crc kubenswrapper[4916]: I1203 19:32:25.670423 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-dsf48" podUID="d83669b5-21c4-48ad-99f9-5abccbf369a3" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 03 19:32:26 crc kubenswrapper[4916]: I1203 19:32:26.065401 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld 
Dec 03 19:32:26 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:26 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:26 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:26 crc kubenswrapper[4916]: I1203 19:32:26.065651 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:27 crc kubenswrapper[4916]: I1203 19:32:27.019043 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:32:27 crc kubenswrapper[4916]: I1203 19:32:27.039025 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ae5584e-d1d9-4aa9-955a-41bdf15f0461-metrics-certs\") pod \"network-metrics-daemon-kbxgw\" (UID: \"9ae5584e-d1d9-4aa9-955a-41bdf15f0461\") " pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:32:27 crc kubenswrapper[4916]: I1203 19:32:27.075823 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:27 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:27 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:27 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:27 crc kubenswrapper[4916]: I1203 19:32:27.075951 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:27 crc kubenswrapper[4916]: I1203 19:32:27.211174 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-kbxgw" Dec 03 19:32:28 crc kubenswrapper[4916]: I1203 19:32:28.064709 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:28 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:28 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:28 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:28 crc kubenswrapper[4916]: I1203 19:32:28.064766 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:29 crc kubenswrapper[4916]: I1203 19:32:29.066033 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:29 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:29 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:29 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:29 crc kubenswrapper[4916]: I1203 19:32:29.066628 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:30 crc kubenswrapper[4916]: I1203 19:32:30.070748 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:30 crc kubenswrapper[4916]: [-]has-synced failed: reason withheld Dec 03 19:32:30 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:30 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:30 crc kubenswrapper[4916]: I1203 19:32:30.070813 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:31 crc kubenswrapper[4916]: I1203 19:32:31.067458 4916 patch_prober.go:28] interesting pod/router-default-5444994796-d2kts container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 19:32:31 crc kubenswrapper[4916]: [+]has-synced ok Dec 03 19:32:31 crc kubenswrapper[4916]: [+]process-running ok Dec 03 19:32:31 crc kubenswrapper[4916]: healthz check failed Dec 03 19:32:31 crc kubenswrapper[4916]: I1203 19:32:31.068241 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-d2kts" podUID="d3a49aea-7afb-4578-9717-58559d47a1fe" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 19:32:32 crc kubenswrapper[4916]: I1203 19:32:32.065069 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-ingress/router-default-5444994796-d2kts" Dec 03 19:32:32 crc kubenswrapper[4916]: I1203 19:32:32.067220 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-d2kts" Dec 03 19:32:35 crc kubenswrapper[4916]: I1203 19:32:35.062759 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:32:35 crc kubenswrapper[4916]: I1203 19:32:35.641132 4916 patch_prober.go:28] interesting pod/console-f9d7485db-cdztl container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Dec 03 19:32:35 crc kubenswrapper[4916]: I1203 19:32:35.641233 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-cdztl" podUID="1d18bd86-a58f-451c-90c0-9fa9834c6d77" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Dec 03 19:32:35 crc kubenswrapper[4916]: I1203 19:32:35.688465 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-dsf48" Dec 03 19:32:36 crc kubenswrapper[4916]: I1203 19:32:36.488244 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 19:32:36 crc kubenswrapper[4916]: I1203 19:32:36.565229 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8c604532-8dc1-4001-9c9f-ca1fe9f5172b","Type":"ContainerDied","Data":"842f5d40fbbf428d7e6ef6967b83c0f1341921738c69dbb21265ad0bc345da48"} Dec 03 19:32:36 crc kubenswrapper[4916]: I1203 19:32:36.565284 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="842f5d40fbbf428d7e6ef6967b83c0f1341921738c69dbb21265ad0bc345da48" Dec 03 19:32:36 crc kubenswrapper[4916]: I1203 19:32:36.565334 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 19:32:36 crc kubenswrapper[4916]: I1203 19:32:36.600024 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kubelet-dir\") pod \"8c604532-8dc1-4001-9c9f-ca1fe9f5172b\" (UID: \"8c604532-8dc1-4001-9c9f-ca1fe9f5172b\") " Dec 03 19:32:36 crc kubenswrapper[4916]: I1203 19:32:36.600185 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8c604532-8dc1-4001-9c9f-ca1fe9f5172b" (UID: "8c604532-8dc1-4001-9c9f-ca1fe9f5172b"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:32:36 crc kubenswrapper[4916]: I1203 19:32:36.600255 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kube-api-access\") pod \"8c604532-8dc1-4001-9c9f-ca1fe9f5172b\" (UID: \"8c604532-8dc1-4001-9c9f-ca1fe9f5172b\") " Dec 03 19:32:36 crc kubenswrapper[4916]: I1203 19:32:36.600667 4916 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 19:32:36 crc kubenswrapper[4916]: I1203 19:32:36.606289 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8c604532-8dc1-4001-9c9f-ca1fe9f5172b" (UID: "8c604532-8dc1-4001-9c9f-ca1fe9f5172b"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:32:36 crc kubenswrapper[4916]: I1203 19:32:36.702377 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c604532-8dc1-4001-9c9f-ca1fe9f5172b-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 19:32:45 crc kubenswrapper[4916]: I1203 19:32:45.645953 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:45 crc kubenswrapper[4916]: I1203 19:32:45.651588 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:32:45 crc kubenswrapper[4916]: I1203 19:32:45.891497 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-gxng9" Dec 03 19:32:46 crc kubenswrapper[4916]: I1203 19:32:46.159544 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:32:46 crc kubenswrapper[4916]: I1203 19:32:46.159934 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.527664 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 19:32:52 crc kubenswrapper[4916]: E1203 19:32:52.530202 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c604532-8dc1-4001-9c9f-ca1fe9f5172b" containerName="pruner" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.530424 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c604532-8dc1-4001-9c9f-ca1fe9f5172b" containerName="pruner" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.530963 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c604532-8dc1-4001-9c9f-ca1fe9f5172b" containerName="pruner" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.531985 4916 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.532152 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.535081 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.535610 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.576789 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ea30fa86-a8f4-4631-b664-06010167aea7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ea30fa86-a8f4-4631-b664-06010167aea7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.576869 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ea30fa86-a8f4-4631-b664-06010167aea7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ea30fa86-a8f4-4631-b664-06010167aea7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.678518 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ea30fa86-a8f4-4631-b664-06010167aea7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ea30fa86-a8f4-4631-b664-06010167aea7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.678686 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ea30fa86-a8f4-4631-b664-06010167aea7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ea30fa86-a8f4-4631-b664-06010167aea7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.678832 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ea30fa86-a8f4-4631-b664-06010167aea7-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ea30fa86-a8f4-4631-b664-06010167aea7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.703668 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ea30fa86-a8f4-4631-b664-06010167aea7-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ea30fa86-a8f4-4631-b664-06010167aea7\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 19:32:52 crc kubenswrapper[4916]: I1203 19:32:52.864036 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 03 19:32:53 crc kubenswrapper[4916]: I1203 19:32:53.411088 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 19:32:53 crc kubenswrapper[4916]: E1203 19:32:53.619078 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 03 19:32:53 crc kubenswrapper[4916]: E1203 19:32:53.619250 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nzfhf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-n7fbp_openshift-marketplace(9f268348-698e-4764-8eb6-3ce43fdc4f9b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 03 19:32:53 crc kubenswrapper[4916]: E1203 19:32:53.620755 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-n7fbp" podUID="9f268348-698e-4764-8eb6-3ce43fdc4f9b"
Dec 03 19:32:54 crc kubenswrapper[4916]: E1203 19:32:54.065888 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-n7fbp" podUID="9f268348-698e-4764-8eb6-3ce43fdc4f9b"
Dec 03 19:32:56 crc kubenswrapper[4916]: E1203 19:32:56.928043 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 03 19:32:56 crc kubenswrapper[4916]: E1203 19:32:56.928189 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l8rp4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-pvd7g_openshift-marketplace(0551f9a0-8ac5-4b28-bf49-b507428e6b05): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 03 19:32:56 crc kubenswrapper[4916]: E1203 19:32:56.929519 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-pvd7g" podUID="0551f9a0-8ac5-4b28-bf49-b507428e6b05"
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.705701 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.708144 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.718356 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.745218 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-kubelet-dir\") pod \"installer-9-crc\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.745296 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7ca918bc-e4f8-485f-b091-092b90e45048-kube-api-access\") pod \"installer-9-crc\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.745334 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-var-lock\") pod \"installer-9-crc\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.846030 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-kubelet-dir\") pod \"installer-9-crc\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.846130 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7ca918bc-e4f8-485f-b091-092b90e45048-kube-api-access\") pod \"installer-9-crc\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.846165 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-var-lock\") pod \"installer-9-crc\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.846176 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-kubelet-dir\") pod \"installer-9-crc\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.846255 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-var-lock\") pod \"installer-9-crc\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 19:32:57 crc kubenswrapper[4916]: I1203 19:32:57.867143 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7ca918bc-e4f8-485f-b091-092b90e45048-kube-api-access\") pod \"installer-9-crc\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 19:32:58 crc kubenswrapper[4916]: I1203 19:32:58.042077 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 19:32:59 crc kubenswrapper[4916]: E1203 19:32:59.330969 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pvd7g" podUID="0551f9a0-8ac5-4b28-bf49-b507428e6b05"
Dec 03 19:32:59 crc kubenswrapper[4916]: E1203 19:32:59.467185 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 03 19:32:59 crc kubenswrapper[4916]: E1203 19:32:59.467432 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rstx6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-n2hmt_openshift-marketplace(4e7df06a-1d77-40e3-916c-581b46b747eb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 03 19:32:59 crc kubenswrapper[4916]: E1203 19:32:59.469381 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-n2hmt" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb"
Dec 03 19:32:59 crc kubenswrapper[4916]: E1203 19:32:59.484387 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 03 19:32:59 crc kubenswrapper[4916]: E1203 19:32:59.484605 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mshqj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-h8kd6_openshift-marketplace(ac59f68d-2f62-419c-9244-ea9ee95242f8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 03 19:32:59 crc kubenswrapper[4916]: E1203 19:32:59.485828 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-h8kd6" podUID="ac59f68d-2f62-419c-9244-ea9ee95242f8"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.085252 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-h8kd6" podUID="ac59f68d-2f62-419c-9244-ea9ee95242f8"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.085322 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-n2hmt" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb"
Dec 03 19:33:05 crc kubenswrapper[4916]: I1203 19:33:05.603161 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-kbxgw"]
Dec 03 19:33:05 crc kubenswrapper[4916]: I1203 19:33:05.606999 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 03 19:33:05 crc kubenswrapper[4916]: I1203 19:33:05.688251 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 03 19:33:05 crc kubenswrapper[4916]: W1203 19:33:05.724905 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podea30fa86_a8f4_4631_b664_06010167aea7.slice/crio-8e95ff2e6b8574a4357c32225671bea954ba471d9d6d5a4da01a6acbac3deb92 WatchSource:0}: Error finding container 8e95ff2e6b8574a4357c32225671bea954ba471d9d6d5a4da01a6acbac3deb92: Status 404 returned error can't find the container with id 8e95ff2e6b8574a4357c32225671bea954ba471d9d6d5a4da01a6acbac3deb92
Dec 03 19:33:05 crc kubenswrapper[4916]: I1203 19:33:05.728964 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7ca918bc-e4f8-485f-b091-092b90e45048","Type":"ContainerStarted","Data":"d8f34bbac60f9b11117734c97dabd0cd42189855a36b8690e8ed2908e4cd4f2e"}
Dec 03 19:33:05 crc kubenswrapper[4916]: I1203 19:33:05.729684 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" event={"ID":"9ae5584e-d1d9-4aa9-955a-41bdf15f0461","Type":"ContainerStarted","Data":"8d279f138bbcfcbfa1ed1224e51e775e8e1860911d67c39aec3e1e016fdc1c7f"}
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.820424 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.820559 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5ptx2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-r4lq2_openshift-marketplace(83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.821833 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-r4lq2" podUID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.901864 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.902359 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gcfsh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-s7w54_openshift-marketplace(0d597107-0497-411e-8b94-fa47d1ddd065): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.904229 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-s7w54" podUID="0d597107-0497-411e-8b94-fa47d1ddd065"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.925336 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.925464 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mxzlf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-k6d96_openshift-marketplace(d4896794-1b4d-4663-aa0b-18c59d5c8486): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.926892 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-k6d96" podUID="d4896794-1b4d-4663-aa0b-18c59d5c8486"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.998272 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 03 19:33:05 crc kubenswrapper[4916]: E1203 19:33:05.998484 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bg2r2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-nq575_openshift-marketplace(d46fd03d-fe64-42cf-9e43-aab1f8c7519f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 03 19:33:06 crc kubenswrapper[4916]: E1203 19:33:05.999705 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-nq575" podUID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f"
Dec 03 19:33:06 crc kubenswrapper[4916]: I1203 19:33:06.737757 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7ca918bc-e4f8-485f-b091-092b90e45048","Type":"ContainerStarted","Data":"6ace1ac7a4e3aaf33c250d24a78ee08e5121378314099c94457accb1c712934a"}
Dec 03 19:33:06 crc kubenswrapper[4916]: I1203 19:33:06.740446 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" event={"ID":"9ae5584e-d1d9-4aa9-955a-41bdf15f0461","Type":"ContainerStarted","Data":"8d5c8b3e348b8a015c0b3859f0e87c5da03b7455586906bbddbe051292465102"}
Dec 03 19:33:06 crc kubenswrapper[4916]: I1203 19:33:06.740470 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-kbxgw" event={"ID":"9ae5584e-d1d9-4aa9-955a-41bdf15f0461","Type":"ContainerStarted","Data":"fb0e094964accbdaeaeae422e80571e257c5f4fe646eb6fa4f92466154906e34"}
Dec 03 19:33:06 crc kubenswrapper[4916]: I1203 19:33:06.740930 4916 generic.go:334] "Generic (PLEG): container finished" podID="ea30fa86-a8f4-4631-b664-06010167aea7" containerID="0295a3da192d2295ea855209952e69e05e0bb3168d35e28dcce69a424cf4cdbb" exitCode=0
Dec 03 19:33:06 crc kubenswrapper[4916]: I1203 19:33:06.740960 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ea30fa86-a8f4-4631-b664-06010167aea7","Type":"ContainerDied","Data":"0295a3da192d2295ea855209952e69e05e0bb3168d35e28dcce69a424cf4cdbb"}
Dec 03 19:33:06 crc kubenswrapper[4916]: I1203 19:33:06.741011 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ea30fa86-a8f4-4631-b664-06010167aea7","Type":"ContainerStarted","Data":"8e95ff2e6b8574a4357c32225671bea954ba471d9d6d5a4da01a6acbac3deb92"}
Dec 03 19:33:06 crc kubenswrapper[4916]: E1203 19:33:06.742467 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-k6d96" podUID="d4896794-1b4d-4663-aa0b-18c59d5c8486"
Dec 03 19:33:06 crc kubenswrapper[4916]: E1203 19:33:06.743080 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-r4lq2" podUID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe"
Dec 03 19:33:06 crc kubenswrapper[4916]: E1203 19:33:06.743141 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-nq575" podUID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f"
Dec 03 19:33:06 crc kubenswrapper[4916]: E1203 19:33:06.743327 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-s7w54" podUID="0d597107-0497-411e-8b94-fa47d1ddd065"
Dec 03 19:33:06 crc kubenswrapper[4916]: I1203 19:33:06.766510 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=9.766493385 podStartE2EDuration="9.766493385s" podCreationTimestamp="2025-12-03 19:32:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:33:06.756006937 +0000 UTC m=+202.718817203" watchObservedRunningTime="2025-12-03 19:33:06.766493385 +0000 UTC m=+202.729303651"
Dec 03 19:33:06 crc kubenswrapper[4916]: I1203 19:33:06.802048 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-kbxgw" podStartSLOduration=182.802025242 podStartE2EDuration="3m2.802025242s" podCreationTimestamp="2025-12-03 19:30:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:33:06.80038435 +0000 UTC m=+202.763194626" watchObservedRunningTime="2025-12-03 19:33:06.802025242 +0000 UTC m=+202.764835518"
Dec 03 19:33:07 crc kubenswrapper[4916]: I1203 19:33:07.981837 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 03 19:33:08 crc kubenswrapper[4916]: I1203 19:33:08.178677 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ea30fa86-a8f4-4631-b664-06010167aea7-kube-api-access\") pod \"ea30fa86-a8f4-4631-b664-06010167aea7\" (UID: \"ea30fa86-a8f4-4631-b664-06010167aea7\") "
Dec 03 19:33:08 crc kubenswrapper[4916]: I1203 19:33:08.178803 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ea30fa86-a8f4-4631-b664-06010167aea7-kubelet-dir\") pod \"ea30fa86-a8f4-4631-b664-06010167aea7\" (UID: \"ea30fa86-a8f4-4631-b664-06010167aea7\") "
Dec 03 19:33:08 crc kubenswrapper[4916]: I1203 19:33:08.178893 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ea30fa86-a8f4-4631-b664-06010167aea7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ea30fa86-a8f4-4631-b664-06010167aea7" (UID: "ea30fa86-a8f4-4631-b664-06010167aea7"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 19:33:08 crc kubenswrapper[4916]: I1203 19:33:08.179275 4916 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ea30fa86-a8f4-4631-b664-06010167aea7-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 03 19:33:08 crc kubenswrapper[4916]: I1203 19:33:08.183513 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea30fa86-a8f4-4631-b664-06010167aea7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ea30fa86-a8f4-4631-b664-06010167aea7" (UID: "ea30fa86-a8f4-4631-b664-06010167aea7"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:33:08 crc kubenswrapper[4916]: I1203 19:33:08.280005 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ea30fa86-a8f4-4631-b664-06010167aea7-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 03 19:33:08 crc kubenswrapper[4916]: I1203 19:33:08.754997 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ea30fa86-a8f4-4631-b664-06010167aea7","Type":"ContainerDied","Data":"8e95ff2e6b8574a4357c32225671bea954ba471d9d6d5a4da01a6acbac3deb92"}
Dec 03 19:33:08 crc kubenswrapper[4916]: I1203 19:33:08.755902 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e95ff2e6b8574a4357c32225671bea954ba471d9d6d5a4da01a6acbac3deb92"
Dec 03 19:33:08 crc kubenswrapper[4916]: I1203 19:33:08.755303 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 03 19:33:09 crc kubenswrapper[4916]: I1203 19:33:09.762764 4916 generic.go:334] "Generic (PLEG): container finished" podID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" containerID="825ac6e66a83540f88ded68a3b159e15384e973f65df0efacd13853e6720138c" exitCode=0
Dec 03 19:33:09 crc kubenswrapper[4916]: I1203 19:33:09.763364 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7fbp" event={"ID":"9f268348-698e-4764-8eb6-3ce43fdc4f9b","Type":"ContainerDied","Data":"825ac6e66a83540f88ded68a3b159e15384e973f65df0efacd13853e6720138c"}
Dec 03 19:33:10 crc kubenswrapper[4916]: I1203 19:33:10.772824 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7fbp" event={"ID":"9f268348-698e-4764-8eb6-3ce43fdc4f9b","Type":"ContainerStarted","Data":"8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9"}
Dec 03 19:33:10 crc kubenswrapper[4916]: I1203 19:33:10.793976 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n7fbp" podStartSLOduration=2.764481185 podStartE2EDuration="55.793958235s" podCreationTimestamp="2025-12-03 19:32:15 +0000 UTC" firstStartedPulling="2025-12-03 19:32:17.144118236 +0000 UTC m=+153.106928502" lastFinishedPulling="2025-12-03 19:33:10.173595286 +0000 UTC m=+206.136405552" observedRunningTime="2025-12-03 19:33:10.792537379 +0000 UTC m=+206.755347645" watchObservedRunningTime="2025-12-03 19:33:10.793958235 +0000 UTC m=+206.756768511"
Dec 03 19:33:12 crc kubenswrapper[4916]: I1203 19:33:12.889888 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9xdsf"]
Dec 03 19:33:14 crc kubenswrapper[4916]: I1203 19:33:14.802084 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvd7g" event={"ID":"0551f9a0-8ac5-4b28-bf49-b507428e6b05","Type":"ContainerStarted","Data":"639162285b4fbaa5ffdb85d76bf427ac8b5a98f3eb69a0ec6f7b15b2f3f588f5"}
Dec 03 19:33:15 crc kubenswrapper[4916]: I1203 19:33:15.810252 4916 generic.go:334] "Generic (PLEG): container finished" podID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" containerID="639162285b4fbaa5ffdb85d76bf427ac8b5a98f3eb69a0ec6f7b15b2f3f588f5" exitCode=0
Dec 03 19:33:15 crc kubenswrapper[4916]: I1203 19:33:15.810317 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvd7g" event={"ID":"0551f9a0-8ac5-4b28-bf49-b507428e6b05","Type":"ContainerDied","Data":"639162285b4fbaa5ffdb85d76bf427ac8b5a98f3eb69a0ec6f7b15b2f3f588f5"}
Dec 03 19:33:15 crc kubenswrapper[4916]: I1203 19:33:15.810382 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvd7g" event={"ID":"0551f9a0-8ac5-4b28-bf49-b507428e6b05","Type":"ContainerStarted","Data":"2eaa495cc93cf41eed15c410ffd15cd17f6af02dbec6bd3c5540ee2c7b498ae0"}
Dec 03 19:33:15 crc kubenswrapper[4916]: I1203 19:33:15.830684 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pvd7g" podStartSLOduration=4.567177197 podStartE2EDuration="1m3.830655744s" podCreationTimestamp="2025-12-03 19:32:12 +0000 UTC" firstStartedPulling="2025-12-03 19:32:16.086577456 +0000 UTC m=+152.049387722" lastFinishedPulling="2025-12-03 19:33:15.350056003 +0000 UTC m=+211.312866269" observedRunningTime="2025-12-03 19:33:15.826769495 +0000 UTC m=+211.789579771" watchObservedRunningTime="2025-12-03 19:33:15.830655744 +0000 UTC m=+211.793466000"
Dec 03 19:33:16 crc kubenswrapper[4916]: I1203 19:33:16.159226 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 19:33:16 crc kubenswrapper[4916]: I1203 19:33:16.159692 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 19:33:16 crc kubenswrapper[4916]: I1203 19:33:16.159749 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms"
Dec 03 19:33:16 crc kubenswrapper[4916]: I1203 19:33:16.160410 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 19:33:16 crc kubenswrapper[4916]: I1203 19:33:16.160540 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216" gracePeriod=600
Dec 03 19:33:16 crc kubenswrapper[4916]: I1203 19:33:16.234040 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:33:16 crc kubenswrapper[4916]: I1203 19:33:16.234106 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:33:16 crc kubenswrapper[4916]: I1203 19:33:16.306013 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:33:16 crc kubenswrapper[4916]: I1203 19:33:16.818649 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216" exitCode=0
Dec 03 19:33:16 crc kubenswrapper[4916]: I1203 19:33:16.818837 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216"}
Dec 03 19:33:16 crc kubenswrapper[4916]: I1203 19:33:16.819472 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"f565473c58fe0f0bf1244c6738ae4337c346a70b2f59f37b60836c87e6c33bc3"}
Dec 03 19:33:17 crc kubenswrapper[4916]: I1203 19:33:17.114238 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:33:18 crc kubenswrapper[4916]: I1203 19:33:18.183220 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n7fbp"]
Dec 03 19:33:18 crc kubenswrapper[4916]: I1203 19:33:18.834115 4916 generic.go:334] "Generic (PLEG): container finished" podID="ac59f68d-2f62-419c-9244-ea9ee95242f8" containerID="1dd27cf7911a1ea172b8f3d72d049fbde892519b24d52153137b8591909844d3" exitCode=0
Dec 03 19:33:18 crc kubenswrapper[4916]: I1203 19:33:18.834226 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h8kd6" event={"ID":"ac59f68d-2f62-419c-9244-ea9ee95242f8","Type":"ContainerDied","Data":"1dd27cf7911a1ea172b8f3d72d049fbde892519b24d52153137b8591909844d3"}
Dec 03 19:33:19 crc kubenswrapper[4916]: I1203 19:33:19.845054 4916 generic.go:334] "Generic (PLEG): container finished" podID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" containerID="402faacce3ddda7357ffedbaaf10356801d316a0f6a73b3f554880f7fcdf29f2" exitCode=0
Dec 03 19:33:19 crc kubenswrapper[4916]: I1203 19:33:19.845240 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nq575" event={"ID":"d46fd03d-fe64-42cf-9e43-aab1f8c7519f","Type":"ContainerDied","Data":"402faacce3ddda7357ffedbaaf10356801d316a0f6a73b3f554880f7fcdf29f2"}
Dec 03 19:33:19 crc kubenswrapper[4916]: I1203 19:33:19.848962 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n2hmt" event={"ID":"4e7df06a-1d77-40e3-916c-581b46b747eb","Type":"ContainerStarted","Data":"7006a8bce0318bcf64f7c7c7dabde054121e639e0fb0bf2a47a7bda0fc9af0f9"}
Dec 03 19:33:19 crc kubenswrapper[4916]: I1203 19:33:19.852472 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h8kd6" event={"ID":"ac59f68d-2f62-419c-9244-ea9ee95242f8","Type":"ContainerStarted","Data":"171e51e86dc4083082b5bd0083ebba551d7bddc455a13319370d0305733ea061"}
Dec 03 19:33:19 crc kubenswrapper[4916]: I1203 19:33:19.852701 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n7fbp" podUID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" containerName="registry-server" containerID="cri-o://8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9" gracePeriod=2
Dec 03 19:33:19 crc kubenswrapper[4916]: I1203 19:33:19.912249 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h8kd6" podStartSLOduration=2.7849711299999997 podStartE2EDuration="1m4.912226016s" podCreationTimestamp="2025-12-03 19:32:15 +0000 UTC" firstStartedPulling="2025-12-03 19:32:17.142495946 +0000 UTC m=+153.105306212" lastFinishedPulling="2025-12-03 19:33:19.269750822 +0000 UTC m=+215.232561098" observedRunningTime="2025-12-03 19:33:19.910145113 +0000 UTC m=+215.872955389" watchObservedRunningTime="2025-12-03 19:33:19.912226016 +0000 UTC m=+215.875036282"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.811944 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.861384 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nq575" event={"ID":"d46fd03d-fe64-42cf-9e43-aab1f8c7519f","Type":"ContainerStarted","Data":"9b9b73e56a5d3619650175c8eb93ecab102b9db5b7b41b013a01254d402d0b40"}
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.864636 4916 generic.go:334] "Generic (PLEG): container finished" podID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" containerID="8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9" exitCode=0
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.864696 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7fbp" event={"ID":"9f268348-698e-4764-8eb6-3ce43fdc4f9b","Type":"ContainerDied","Data":"8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9"}
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.864726 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n7fbp" event={"ID":"9f268348-698e-4764-8eb6-3ce43fdc4f9b","Type":"ContainerDied","Data":"329616fc384b0090a23bc2ae47f2b733131327c763f394430a67cab694eac89c"}
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.864750 4916 scope.go:117] "RemoveContainer" containerID="8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.864791 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n7fbp"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.869070 4916 generic.go:334] "Generic (PLEG): container finished" podID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerID="7006a8bce0318bcf64f7c7c7dabde054121e639e0fb0bf2a47a7bda0fc9af0f9" exitCode=0
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.869138 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n2hmt" event={"ID":"4e7df06a-1d77-40e3-916c-581b46b747eb","Type":"ContainerDied","Data":"7006a8bce0318bcf64f7c7c7dabde054121e639e0fb0bf2a47a7bda0fc9af0f9"}
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.880681 4916 scope.go:117] "RemoveContainer" containerID="825ac6e66a83540f88ded68a3b159e15384e973f65df0efacd13853e6720138c"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.881519 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nq575" podStartSLOduration=3.646954987 podStartE2EDuration="1m7.881494244s" podCreationTimestamp="2025-12-03 19:32:13 +0000 UTC" firstStartedPulling="2025-12-03 19:32:16.079208571 +0000 UTC m=+152.042018837" lastFinishedPulling="2025-12-03 19:33:20.313747828 +0000 UTC m=+216.276558094" observedRunningTime="2025-12-03 19:33:20.879244837 +0000 UTC m=+216.842055103" watchObservedRunningTime="2025-12-03 19:33:20.881494244 +0000 UTC m=+216.844304510"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.901014 4916 scope.go:117] "RemoveContainer" containerID="53770bb90872df3d98f3fb6da38df266f7a2eaa6b1270a9c78edd889db613a99"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.917020 4916 scope.go:117] "RemoveContainer" containerID="8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9"
Dec 03 19:33:20 crc kubenswrapper[4916]: E1203 19:33:20.917485 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9\": container with ID starting with 8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9 not found: ID does not exist" containerID="8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.917531 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9"} err="failed to get container status \"8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9\": rpc error: code = NotFound desc = could not find container \"8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9\": container with ID starting with 8596511256b4795efe148274724f16b2a241d2f87d1fc82a8b0919e811caaaf9 not found: ID does not exist"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.917575 4916 scope.go:117] "RemoveContainer" containerID="825ac6e66a83540f88ded68a3b159e15384e973f65df0efacd13853e6720138c"
Dec 03 19:33:20 crc kubenswrapper[4916]: E1203 19:33:20.918031 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"825ac6e66a83540f88ded68a3b159e15384e973f65df0efacd13853e6720138c\": container with ID starting with 825ac6e66a83540f88ded68a3b159e15384e973f65df0efacd13853e6720138c not found: ID does not exist" containerID="825ac6e66a83540f88ded68a3b159e15384e973f65df0efacd13853e6720138c"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.918073 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"825ac6e66a83540f88ded68a3b159e15384e973f65df0efacd13853e6720138c"} err="failed to get container status \"825ac6e66a83540f88ded68a3b159e15384e973f65df0efacd13853e6720138c\": rpc error: code = NotFound desc = could not find container \"825ac6e66a83540f88ded68a3b159e15384e973f65df0efacd13853e6720138c\": container with ID starting with 825ac6e66a83540f88ded68a3b159e15384e973f65df0efacd13853e6720138c not found: ID does not exist"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.918098 4916 scope.go:117] "RemoveContainer" containerID="53770bb90872df3d98f3fb6da38df266f7a2eaa6b1270a9c78edd889db613a99"
Dec 03 19:33:20 crc kubenswrapper[4916]: E1203 19:33:20.918396 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53770bb90872df3d98f3fb6da38df266f7a2eaa6b1270a9c78edd889db613a99\": container with ID starting with 53770bb90872df3d98f3fb6da38df266f7a2eaa6b1270a9c78edd889db613a99 not found: ID does not exist" containerID="53770bb90872df3d98f3fb6da38df266f7a2eaa6b1270a9c78edd889db613a99"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.918425 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53770bb90872df3d98f3fb6da38df266f7a2eaa6b1270a9c78edd889db613a99"} err="failed to get container status \"53770bb90872df3d98f3fb6da38df266f7a2eaa6b1270a9c78edd889db613a99\": rpc error: code = NotFound desc = could not find container \"53770bb90872df3d98f3fb6da38df266f7a2eaa6b1270a9c78edd889db613a99\": container with ID starting with 53770bb90872df3d98f3fb6da38df266f7a2eaa6b1270a9c78edd889db613a99 not found: ID does not exist"
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.965294 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-utilities\") pod \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") "
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.965369 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzfhf\" (UniqueName: \"kubernetes.io/projected/9f268348-698e-4764-8eb6-3ce43fdc4f9b-kube-api-access-nzfhf\") pod \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") "
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.965419 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-catalog-content\") pod \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\" (UID: \"9f268348-698e-4764-8eb6-3ce43fdc4f9b\") "
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.966428 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-utilities" (OuterVolumeSpecName: "utilities") pod "9f268348-698e-4764-8eb6-3ce43fdc4f9b" (UID: "9f268348-698e-4764-8eb6-3ce43fdc4f9b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:33:20 crc kubenswrapper[4916]: I1203 19:33:20.972329 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f268348-698e-4764-8eb6-3ce43fdc4f9b-kube-api-access-nzfhf" (OuterVolumeSpecName: "kube-api-access-nzfhf") pod "9f268348-698e-4764-8eb6-3ce43fdc4f9b" (UID: "9f268348-698e-4764-8eb6-3ce43fdc4f9b"). InnerVolumeSpecName "kube-api-access-nzfhf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:33:21 crc kubenswrapper[4916]: I1203 19:33:21.001636 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9f268348-698e-4764-8eb6-3ce43fdc4f9b" (UID: "9f268348-698e-4764-8eb6-3ce43fdc4f9b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:33:21 crc kubenswrapper[4916]: I1203 19:33:21.067768 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 19:33:21 crc kubenswrapper[4916]: I1203 19:33:21.067822 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f268348-698e-4764-8eb6-3ce43fdc4f9b-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 19:33:21 crc kubenswrapper[4916]: I1203 19:33:21.067834 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzfhf\" (UniqueName: \"kubernetes.io/projected/9f268348-698e-4764-8eb6-3ce43fdc4f9b-kube-api-access-nzfhf\") on node \"crc\" DevicePath \"\""
Dec 03 19:33:21 crc kubenswrapper[4916]: I1203 19:33:21.200499 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n7fbp"]
Dec 03 19:33:21 crc kubenswrapper[4916]: I1203 19:33:21.203396 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n7fbp"]
Dec 03 19:33:21 crc kubenswrapper[4916]: I1203 19:33:21.875903 4916 generic.go:334] "Generic (PLEG): container finished" podID="0d597107-0497-411e-8b94-fa47d1ddd065" containerID="7ab6d04354b090871dfd1e439be0ae5d7706ce5cc93bd8a2f0a854c8373067c6" exitCode=0
Dec 03 19:33:21 crc kubenswrapper[4916]: I1203 19:33:21.876007 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7w54" event={"ID":"0d597107-0497-411e-8b94-fa47d1ddd065","Type":"ContainerDied","Data":"7ab6d04354b090871dfd1e439be0ae5d7706ce5cc93bd8a2f0a854c8373067c6"}
Dec 03 19:33:21 crc kubenswrapper[4916]: I1203 19:33:21.881299 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n2hmt" event={"ID":"4e7df06a-1d77-40e3-916c-581b46b747eb","Type":"ContainerStarted","Data":"7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c"}
Dec 03 19:33:21 crc kubenswrapper[4916]: I1203 19:33:21.923241 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n2hmt" podStartSLOduration=2.753643303 podStartE2EDuration="1m5.923224481s" podCreationTimestamp="2025-12-03 19:32:16 +0000 UTC" firstStartedPulling="2025-12-03 19:32:18.194302553 +0000 UTC m=+154.157112819" lastFinishedPulling="2025-12-03 19:33:21.363883731 +0000 UTC m=+217.326693997" observedRunningTime="2025-12-03 19:33:21.920123812 +0000 UTC m=+217.882934068" watchObservedRunningTime="2025-12-03 19:33:21.923224481 +0000 UTC m=+217.886034747"
Dec 03 19:33:22 crc kubenswrapper[4916]: I1203 19:33:22.484679 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" path="/var/lib/kubelet/pods/9f268348-698e-4764-8eb6-3ce43fdc4f9b/volumes"
Dec 03 19:33:22 crc kubenswrapper[4916]: I1203 19:33:22.893368 4916 generic.go:334] "Generic (PLEG): container finished" podID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" containerID="3b5d7f957e99b462249f18907b8abfdf06a3507dc9f4ad7d2481210fd16a9887" exitCode=0
Dec 03 19:33:22 crc kubenswrapper[4916]: I1203 19:33:22.893467 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4lq2" event={"ID":"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe","Type":"ContainerDied","Data":"3b5d7f957e99b462249f18907b8abfdf06a3507dc9f4ad7d2481210fd16a9887"}
Dec 03 19:33:22 crc kubenswrapper[4916]: I1203 19:33:22.897943 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7w54" event={"ID":"0d597107-0497-411e-8b94-fa47d1ddd065","Type":"ContainerStarted","Data":"f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6"}
Dec 03 19:33:22 crc kubenswrapper[4916]: I1203 19:33:22.939716 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s7w54" podStartSLOduration=3.779775373 podStartE2EDuration="1m9.939686365s" podCreationTimestamp="2025-12-03 19:32:13 +0000 UTC" firstStartedPulling="2025-12-03 19:32:16.121372172 +0000 UTC m=+152.084182438" lastFinishedPulling="2025-12-03 19:33:22.281283164 +0000 UTC m=+218.244093430" observedRunningTime="2025-12-03 19:33:22.936687108 +0000 UTC m=+218.899497384" watchObservedRunningTime="2025-12-03 19:33:22.939686365 +0000 UTC m=+218.902496631"
Dec 03 19:33:23 crc kubenswrapper[4916]: I1203 19:33:23.266950 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pvd7g"
Dec 03 19:33:23 crc kubenswrapper[4916]: I1203 19:33:23.267051 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pvd7g"
Dec 03 19:33:23 crc kubenswrapper[4916]: I1203 19:33:23.335245 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pvd7g"
Dec 03 19:33:23 crc kubenswrapper[4916]: I1203 19:33:23.689464 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nq575"
Dec 03 19:33:23 crc kubenswrapper[4916]: I1203 19:33:23.689815 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nq575"
Dec 03 19:33:23 crc kubenswrapper[4916]: I1203 19:33:23.739759 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nq575"
Dec 03 19:33:23 crc kubenswrapper[4916]: I1203 19:33:23.906487 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6d96" event={"ID":"d4896794-1b4d-4663-aa0b-18c59d5c8486","Type":"ContainerStarted","Data":"255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab"}
Dec 03 19:33:23 crc kubenswrapper[4916]: I1203 19:33:23.975903 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pvd7g"
Dec 03 19:33:24 crc kubenswrapper[4916]: I1203 19:33:24.083363 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:33:24 crc kubenswrapper[4916]: I1203 19:33:24.083416 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:33:24 crc kubenswrapper[4916]: I1203 19:33:24.137308 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:33:24 crc kubenswrapper[4916]: I1203 19:33:24.914403 4916 generic.go:334] "Generic (PLEG): container finished" podID="d4896794-1b4d-4663-aa0b-18c59d5c8486" containerID="255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab" exitCode=0
Dec 03 19:33:24 crc kubenswrapper[4916]: I1203 19:33:24.914473 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6d96" event={"ID":"d4896794-1b4d-4663-aa0b-18c59d5c8486","Type":"ContainerDied","Data":"255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab"}
Dec 03 19:33:24 crc kubenswrapper[4916]: I1203 19:33:24.918482 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4lq2" event={"ID":"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe","Type":"ContainerStarted","Data":"a66b2170fa84a792a5940f775b39985b2543623f633d7683ddbf7dcf6f26679a"}
Dec 03 19:33:24 crc kubenswrapper[4916]: I1203 19:33:24.955366 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-r4lq2" podStartSLOduration=4.608557756 podStartE2EDuration="1m11.955331118s" podCreationTimestamp="2025-12-03 19:32:13 +0000 UTC" firstStartedPulling="2025-12-03 19:32:16.083636172 +0000 UTC m=+152.046446438" lastFinishedPulling="2025-12-03 19:33:23.430409534 +0000 UTC m=+219.393219800" observedRunningTime="2025-12-03 19:33:24.950440443 +0000 UTC m=+220.913250729" watchObservedRunningTime="2025-12-03 19:33:24.955331118 +0000 UTC m=+220.918141384"
Dec 03 19:33:25 crc kubenswrapper[4916]: I1203 19:33:25.819502 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:33:25 crc kubenswrapper[4916]: I1203 19:33:25.820036 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:33:25 crc kubenswrapper[4916]: I1203 19:33:25.870033 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:33:25 crc kubenswrapper[4916]: I1203 19:33:25.982366 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:33:26 crc kubenswrapper[4916]: I1203 19:33:26.625600 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:33:26 crc kubenswrapper[4916]: I1203 19:33:26.626807 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:33:26 crc kubenswrapper[4916]: I1203 19:33:26.931218 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6d96" event={"ID":"d4896794-1b4d-4663-aa0b-18c59d5c8486","Type":"ContainerStarted","Data":"a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1"}
Dec 03 19:33:26 crc kubenswrapper[4916]: I1203 19:33:26.957574 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-k6d96" podStartSLOduration=4.337147068 podStartE2EDuration="1m10.9575355s" podCreationTimestamp="2025-12-03 19:32:16 +0000 UTC" firstStartedPulling="2025-12-03 19:32:19.215965021 +0000 UTC m=+155.178775287" lastFinishedPulling="2025-12-03 19:33:25.836353453 +0000 UTC m=+221.799163719" observedRunningTime="2025-12-03 19:33:26.953976989 +0000 UTC m=+222.916787255" watchObservedRunningTime="2025-12-03 19:33:26.9575355 +0000 UTC m=+222.920345766"
Dec 03 19:33:27 crc kubenswrapper[4916]: I1203 19:33:27.693374 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-n2hmt" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerName="registry-server" probeResult="failure" output=<
Dec 03 19:33:27 crc kubenswrapper[4916]: timeout: failed to connect service ":50051" within 1s
Dec 03 19:33:27 crc kubenswrapper[4916]: >
Dec 03 19:33:33 crc kubenswrapper[4916]: I1203 19:33:33.746753 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nq575"
Dec 03 19:33:33 crc kubenswrapper[4916]: I1203 19:33:33.792405 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nq575"]
Dec 03 19:33:33 crc kubenswrapper[4916]: I1203 19:33:33.941024 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:33:33 crc kubenswrapper[4916]: I1203 19:33:33.941090 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:33:33 crc kubenswrapper[4916]: I1203 19:33:33.972291 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nq575" podUID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" containerName="registry-server" containerID="cri-o://9b9b73e56a5d3619650175c8eb93ecab102b9db5b7b41b013a01254d402d0b40" gracePeriod=2
Dec 03 19:33:34 crc kubenswrapper[4916]: I1203 19:33:34.011851 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:33:34 crc kubenswrapper[4916]: I1203 19:33:34.056237 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:33:34 crc kubenswrapper[4916]: I1203 19:33:34.133805 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s7w54"
Dec 03 19:33:35 crc kubenswrapper[4916]: I1203 19:33:35.783141 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s7w54"]
Dec 03 19:33:35 crc kubenswrapper[4916]: I1203 19:33:35.783389 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s7w54" podUID="0d597107-0497-411e-8b94-fa47d1ddd065" containerName="registry-server" containerID="cri-o://f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6" gracePeriod=2
Dec 03 19:33:36 crc kubenswrapper[4916]: I1203 19:33:36.690325 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:33:36 crc kubenswrapper[4916]: I1203 19:33:36.785603 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:33:36 crc kubenswrapper[4916]: I1203 19:33:36.890320 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:33:36 crc kubenswrapper[4916]: I1203 19:33:36.890382 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:33:36 crc kubenswrapper[4916]: I1203 19:33:36.927215 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-k6d96"
Dec 03 19:33:37 crc kubenswrapper[4916]: I1203 19:33:37.046788 4916
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-k6d96" Dec 03 19:33:37 crc kubenswrapper[4916]: I1203 19:33:37.926454 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" podUID="b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" containerName="oauth-openshift" containerID="cri-o://faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb" gracePeriod=15 Dec 03 19:33:38 crc kubenswrapper[4916]: I1203 19:33:38.988298 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k6d96"] Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.692306 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nq575" Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.821303 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bg2r2\" (UniqueName: \"kubernetes.io/projected/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-kube-api-access-bg2r2\") pod \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.821372 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-catalog-content\") pod \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.821403 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-utilities\") pod \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\" (UID: \"d46fd03d-fe64-42cf-9e43-aab1f8c7519f\") " Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.822709 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-utilities" (OuterVolumeSpecName: "utilities") pod "d46fd03d-fe64-42cf-9e43-aab1f8c7519f" (UID: "d46fd03d-fe64-42cf-9e43-aab1f8c7519f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.831223 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-kube-api-access-bg2r2" (OuterVolumeSpecName: "kube-api-access-bg2r2") pod "d46fd03d-fe64-42cf-9e43-aab1f8c7519f" (UID: "d46fd03d-fe64-42cf-9e43-aab1f8c7519f"). InnerVolumeSpecName "kube-api-access-bg2r2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.846540 4916 generic.go:334] "Generic (PLEG): container finished" podID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" containerID="9b9b73e56a5d3619650175c8eb93ecab102b9db5b7b41b013a01254d402d0b40" exitCode=0 Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.846634 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nq575" event={"ID":"d46fd03d-fe64-42cf-9e43-aab1f8c7519f","Type":"ContainerDied","Data":"9b9b73e56a5d3619650175c8eb93ecab102b9db5b7b41b013a01254d402d0b40"} Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.846710 4916 scope.go:117] "RemoveContainer" containerID="9b9b73e56a5d3619650175c8eb93ecab102b9db5b7b41b013a01254d402d0b40" Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.869987 4916 scope.go:117] "RemoveContainer" containerID="402faacce3ddda7357ffedbaaf10356801d316a0f6a73b3f554880f7fcdf29f2" Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.888808 4916 scope.go:117] "RemoveContainer" containerID="4ffd340fd79cf6c8c80a5f54828ec9cb3dc43d72877f2e79f6530a4c92c28644" Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.896664 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d46fd03d-fe64-42cf-9e43-aab1f8c7519f" (UID: "d46fd03d-fe64-42cf-9e43-aab1f8c7519f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.922975 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.923032 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bg2r2\" (UniqueName: \"kubernetes.io/projected/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-kube-api-access-bg2r2\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:39 crc kubenswrapper[4916]: I1203 19:33:39.923055 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d46fd03d-fe64-42cf-9e43-aab1f8c7519f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.275860 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.382112 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-s7w54_0d597107-0497-411e-8b94-fa47d1ddd065/registry-server/0.log" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.382841 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s7w54" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.432676 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-trusted-ca-bundle\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.432765 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-router-certs\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.432799 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-cliconfig\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.432827 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-idp-0-file-data\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.432855 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-policies\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.432894 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-utilities\") pod \"0d597107-0497-411e-8b94-fa47d1ddd065\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.432917 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-dir\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.432948 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-ocp-branding-template\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.432997 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-serving-cert\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.433029 4916 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-service-ca\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.433062 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-provider-selection\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.433086 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-catalog-content\") pod \"0d597107-0497-411e-8b94-fa47d1ddd065\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.433849 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-session\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.433890 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-login\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.433093 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.433930 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.433757 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.433913 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.433945 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpckw\" (UniqueName: \"kubernetes.io/projected/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-kube-api-access-fpckw\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.434011 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-error\") pod \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\" (UID: \"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.434228 4916 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.434247 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.434261 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.434275 4916 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.434711 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-utilities" (OuterVolumeSpecName: "utilities") pod "0d597107-0497-411e-8b94-fa47d1ddd065" (UID: "0d597107-0497-411e-8b94-fa47d1ddd065"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.435422 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.438650 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.438879 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.439136 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.439275 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.439548 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.439626 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.439676 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-kube-api-access-fpckw" (OuterVolumeSpecName: "kube-api-access-fpckw") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "kube-api-access-fpckw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.439724 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.439827 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" (UID: "b69fe1de-4b0b-4f4a-b1f8-db7be29f3067"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.489221 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d597107-0497-411e-8b94-fa47d1ddd065" (UID: "0d597107-0497-411e-8b94-fa47d1ddd065"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535276 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcfsh\" (UniqueName: \"kubernetes.io/projected/0d597107-0497-411e-8b94-fa47d1ddd065-kube-api-access-gcfsh\") pod \"0d597107-0497-411e-8b94-fa47d1ddd065\" (UID: \"0d597107-0497-411e-8b94-fa47d1ddd065\") " Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535743 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535777 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535797 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535820 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535839 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535857 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535876 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpckw\" (UniqueName: \"kubernetes.io/projected/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-kube-api-access-fpckw\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535894 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535912 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535933 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535952 4916 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.535970 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d597107-0497-411e-8b94-fa47d1ddd065-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.539656 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d597107-0497-411e-8b94-fa47d1ddd065-kube-api-access-gcfsh" (OuterVolumeSpecName: "kube-api-access-gcfsh") pod "0d597107-0497-411e-8b94-fa47d1ddd065" (UID: "0d597107-0497-411e-8b94-fa47d1ddd065"). InnerVolumeSpecName "kube-api-access-gcfsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.636849 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcfsh\" (UniqueName: \"kubernetes.io/projected/0d597107-0497-411e-8b94-fa47d1ddd065-kube-api-access-gcfsh\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.855166 4916 generic.go:334] "Generic (PLEG): container finished" podID="b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" containerID="faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb" exitCode=0 Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.855237 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.855260 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" event={"ID":"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067","Type":"ContainerDied","Data":"faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb"} Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.855356 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9xdsf" event={"ID":"b69fe1de-4b0b-4f4a-b1f8-db7be29f3067","Type":"ContainerDied","Data":"30a9968685d5a1f4264e7a0808a2f27d596625e2f01f80fdd3be3cde3740a1b5"} Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.855386 4916 scope.go:117] "RemoveContainer" containerID="faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.861101 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-s7w54_0d597107-0497-411e-8b94-fa47d1ddd065/registry-server/0.log" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.863009 4916 generic.go:334] "Generic (PLEG): container finished" podID="0d597107-0497-411e-8b94-fa47d1ddd065" containerID="f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6" exitCode=137 Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.863158 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s7w54" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.863284 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7w54" event={"ID":"0d597107-0497-411e-8b94-fa47d1ddd065","Type":"ContainerDied","Data":"f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6"} Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.863356 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s7w54" event={"ID":"0d597107-0497-411e-8b94-fa47d1ddd065","Type":"ContainerDied","Data":"1b8ade59172824c1e3e6db57553bda2784c8224d3ea43c4cd16de060a12fc277"} Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.865239 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-k6d96" podUID="d4896794-1b4d-4663-aa0b-18c59d5c8486" containerName="registry-server" containerID="cri-o://a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1" gracePeriod=2 Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.865394 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nq575" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.865273 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nq575" event={"ID":"d46fd03d-fe64-42cf-9e43-aab1f8c7519f","Type":"ContainerDied","Data":"8d12fd0bbae829b83704c63978341ed86959cc635c39e64ae9160776d6ba980d"} Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.895985 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9xdsf"] Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.896176 4916 scope.go:117] "RemoveContainer" containerID="faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb" Dec 03 19:33:40 crc kubenswrapper[4916]: E1203 19:33:40.897089 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb\": container with ID starting with faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb not found: ID does not exist" containerID="faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.897142 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb"} err="failed to get container status \"faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb\": rpc error: code = NotFound desc = could not find container \"faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb\": container with ID starting with faed76ff39ea1851789b1a5737a25c629dbad9c131f10ac768f8480ff604fbcb not found: ID does not exist" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.897297 4916 scope.go:117] "RemoveContainer" containerID="f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.901150 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9xdsf"] Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.923747 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nq575"] Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.924537 4916 scope.go:117] "RemoveContainer" containerID="7ab6d04354b090871dfd1e439be0ae5d7706ce5cc93bd8a2f0a854c8373067c6" Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.935420 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nq575"] Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.940048 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s7w54"] Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.943154 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s7w54"] Dec 03 19:33:40 crc kubenswrapper[4916]: I1203 19:33:40.950903 4916 scope.go:117] "RemoveContainer" containerID="4117dd97c2983eb82524d61a3b3c8d70f7b6bf603d679b3bc44818b942db41d8" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.061190 4916 scope.go:117] "RemoveContainer" containerID="f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6" Dec 03 19:33:41 crc kubenswrapper[4916]: E1203 19:33:41.061705 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code 
= NotFound desc = could not find container \"f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6\": container with ID starting with f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6 not found: ID does not exist" containerID="f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.061747 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6"} err="failed to get container status \"f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6\": rpc error: code = NotFound desc = could not find container \"f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6\": container with ID starting with f9d9a6a2c54d80da27f6ac0c0d6276c28ed1405aaa1f62b64a8797141ad59ba6 not found: ID does not exist" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.061811 4916 scope.go:117] "RemoveContainer" containerID="7ab6d04354b090871dfd1e439be0ae5d7706ce5cc93bd8a2f0a854c8373067c6" Dec 03 19:33:41 crc kubenswrapper[4916]: E1203 19:33:41.062447 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ab6d04354b090871dfd1e439be0ae5d7706ce5cc93bd8a2f0a854c8373067c6\": container with ID starting with 7ab6d04354b090871dfd1e439be0ae5d7706ce5cc93bd8a2f0a854c8373067c6 not found: ID does not exist" containerID="7ab6d04354b090871dfd1e439be0ae5d7706ce5cc93bd8a2f0a854c8373067c6" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.062491 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ab6d04354b090871dfd1e439be0ae5d7706ce5cc93bd8a2f0a854c8373067c6"} err="failed to get container status \"7ab6d04354b090871dfd1e439be0ae5d7706ce5cc93bd8a2f0a854c8373067c6\": rpc error: code = NotFound desc = could not find container \"7ab6d04354b090871dfd1e439be0ae5d7706ce5cc93bd8a2f0a854c8373067c6\": container with ID starting with 7ab6d04354b090871dfd1e439be0ae5d7706ce5cc93bd8a2f0a854c8373067c6 not found: ID does not exist" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.062521 4916 scope.go:117] "RemoveContainer" containerID="4117dd97c2983eb82524d61a3b3c8d70f7b6bf603d679b3bc44818b942db41d8" Dec 03 19:33:41 crc kubenswrapper[4916]: E1203 19:33:41.062880 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4117dd97c2983eb82524d61a3b3c8d70f7b6bf603d679b3bc44818b942db41d8\": container with ID starting with 4117dd97c2983eb82524d61a3b3c8d70f7b6bf603d679b3bc44818b942db41d8 not found: ID does not exist" containerID="4117dd97c2983eb82524d61a3b3c8d70f7b6bf603d679b3bc44818b942db41d8" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.062919 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4117dd97c2983eb82524d61a3b3c8d70f7b6bf603d679b3bc44818b942db41d8"} err="failed to get container status \"4117dd97c2983eb82524d61a3b3c8d70f7b6bf603d679b3bc44818b942db41d8\": rpc error: code = NotFound desc = could not find container \"4117dd97c2983eb82524d61a3b3c8d70f7b6bf603d679b3bc44818b942db41d8\": container with ID starting with 4117dd97c2983eb82524d61a3b3c8d70f7b6bf603d679b3bc44818b942db41d8 not found: ID does not exist" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.445676 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k6d96" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.457885 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxzlf\" (UniqueName: \"kubernetes.io/projected/d4896794-1b4d-4663-aa0b-18c59d5c8486-kube-api-access-mxzlf\") pod \"d4896794-1b4d-4663-aa0b-18c59d5c8486\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.457970 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-catalog-content\") pod \"d4896794-1b4d-4663-aa0b-18c59d5c8486\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.458045 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-utilities\") pod \"d4896794-1b4d-4663-aa0b-18c59d5c8486\" (UID: \"d4896794-1b4d-4663-aa0b-18c59d5c8486\") " Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.459669 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-utilities" (OuterVolumeSpecName: "utilities") pod "d4896794-1b4d-4663-aa0b-18c59d5c8486" (UID: "d4896794-1b4d-4663-aa0b-18c59d5c8486"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.461652 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4896794-1b4d-4663-aa0b-18c59d5c8486-kube-api-access-mxzlf" (OuterVolumeSpecName: "kube-api-access-mxzlf") pod "d4896794-1b4d-4663-aa0b-18c59d5c8486" (UID: "d4896794-1b4d-4663-aa0b-18c59d5c8486"). InnerVolumeSpecName "kube-api-access-mxzlf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.559418 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.559496 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxzlf\" (UniqueName: \"kubernetes.io/projected/d4896794-1b4d-4663-aa0b-18c59d5c8486-kube-api-access-mxzlf\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.561986 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4896794-1b4d-4663-aa0b-18c59d5c8486" (UID: "d4896794-1b4d-4663-aa0b-18c59d5c8486"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.661100 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4896794-1b4d-4663-aa0b-18c59d5c8486-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.883104 4916 generic.go:334] "Generic (PLEG): container finished" podID="d4896794-1b4d-4663-aa0b-18c59d5c8486" containerID="a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1" exitCode=0 Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.883182 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6d96" event={"ID":"d4896794-1b4d-4663-aa0b-18c59d5c8486","Type":"ContainerDied","Data":"a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1"} Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.883211 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k6d96" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.883252 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k6d96" event={"ID":"d4896794-1b4d-4663-aa0b-18c59d5c8486","Type":"ContainerDied","Data":"ad119045671f3778689c7d65be38934ed6ffbccf52e53d50726b3ebaa62e5626"} Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.883298 4916 scope.go:117] "RemoveContainer" containerID="a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.908718 4916 scope.go:117] "RemoveContainer" containerID="255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.944871 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k6d96"] Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.948526 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-k6d96"] Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.964168 4916 scope.go:117] "RemoveContainer" containerID="12e0d5616024f3b2947ed313a06868e79ef7892b77417c6d9e325a50cd1f76de" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.995067 4916 scope.go:117] "RemoveContainer" containerID="a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1" Dec 03 19:33:41 crc kubenswrapper[4916]: E1203 19:33:41.996422 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1\": container with ID starting with a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1 not found: ID does not exist" containerID="a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.996582 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1"} err="failed to get container status \"a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1\": rpc error: code = NotFound desc = could not find container \"a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1\": container with ID starting with a60fbf3b92a70c4cd9b40c1a6dd00fcf1a8e547c6ce39ecba02c27f28604fdb1 not found: ID does not exist" Dec 03 19:33:41 crc 
kubenswrapper[4916]: I1203 19:33:41.996830 4916 scope.go:117] "RemoveContainer" containerID="255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab" Dec 03 19:33:41 crc kubenswrapper[4916]: E1203 19:33:41.997819 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab\": container with ID starting with 255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab not found: ID does not exist" containerID="255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.997947 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab"} err="failed to get container status \"255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab\": rpc error: code = NotFound desc = could not find container \"255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab\": container with ID starting with 255495985dc2b2c455375f0b2641db07c0158889b3f3e329f2d07862906228ab not found: ID does not exist" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.998055 4916 scope.go:117] "RemoveContainer" containerID="12e0d5616024f3b2947ed313a06868e79ef7892b77417c6d9e325a50cd1f76de" Dec 03 19:33:41 crc kubenswrapper[4916]: E1203 19:33:41.998997 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12e0d5616024f3b2947ed313a06868e79ef7892b77417c6d9e325a50cd1f76de\": container with ID starting with 12e0d5616024f3b2947ed313a06868e79ef7892b77417c6d9e325a50cd1f76de not found: ID does not exist" containerID="12e0d5616024f3b2947ed313a06868e79ef7892b77417c6d9e325a50cd1f76de" Dec 03 19:33:41 crc kubenswrapper[4916]: I1203 19:33:41.999128 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12e0d5616024f3b2947ed313a06868e79ef7892b77417c6d9e325a50cd1f76de"} err="failed to get container status \"12e0d5616024f3b2947ed313a06868e79ef7892b77417c6d9e325a50cd1f76de\": rpc error: code = NotFound desc = could not find container \"12e0d5616024f3b2947ed313a06868e79ef7892b77417c6d9e325a50cd1f76de\": container with ID starting with 12e0d5616024f3b2947ed313a06868e79ef7892b77417c6d9e325a50cd1f76de not found: ID does not exist" Dec 03 19:33:42 crc kubenswrapper[4916]: I1203 19:33:42.490055 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d597107-0497-411e-8b94-fa47d1ddd065" path="/var/lib/kubelet/pods/0d597107-0497-411e-8b94-fa47d1ddd065/volumes" Dec 03 19:33:42 crc kubenswrapper[4916]: I1203 19:33:42.492156 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" path="/var/lib/kubelet/pods/b69fe1de-4b0b-4f4a-b1f8-db7be29f3067/volumes" Dec 03 19:33:42 crc kubenswrapper[4916]: I1203 19:33:42.493134 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" path="/var/lib/kubelet/pods/d46fd03d-fe64-42cf-9e43-aab1f8c7519f/volumes" Dec 03 19:33:42 crc kubenswrapper[4916]: I1203 19:33:42.495509 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4896794-1b4d-4663-aa0b-18c59d5c8486" path="/var/lib/kubelet/pods/d4896794-1b4d-4663-aa0b-18c59d5c8486/volumes" Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.715903 4916 kubelet.go:2431] "SyncLoop 
REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.716283 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd" gracePeriod=15 Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.716314 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4" gracePeriod=15 Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.716366 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694" gracePeriod=15 Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.716314 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73" gracePeriod=15 Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.716435 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741" gracePeriod=15 Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.718851 4916 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719094 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" containerName="extract-utilities" Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719114 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" containerName="extract-utilities" Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719130 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" containerName="registry-server" Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719138 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" containerName="registry-server" Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719150 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" containerName="extract-content" Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719157 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" containerName="extract-content" Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719169 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 03 
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719177 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719185 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" containerName="oauth-openshift"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719193 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" containerName="oauth-openshift"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719203 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" containerName="registry-server"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719210 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" containerName="registry-server"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719220 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719228 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719239 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea30fa86-a8f4-4631-b664-06010167aea7" containerName="pruner"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719246 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea30fa86-a8f4-4631-b664-06010167aea7" containerName="pruner"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719286 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4896794-1b4d-4663-aa0b-18c59d5c8486" containerName="extract-content"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719294 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4896794-1b4d-4663-aa0b-18c59d5c8486" containerName="extract-content"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719305 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4896794-1b4d-4663-aa0b-18c59d5c8486" containerName="registry-server"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719313 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4896794-1b4d-4663-aa0b-18c59d5c8486" containerName="registry-server"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719323 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d597107-0497-411e-8b94-fa47d1ddd065" containerName="extract-content"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719330 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d597107-0497-411e-8b94-fa47d1ddd065" containerName="extract-content"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719339 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" containerName="extract-content"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719346 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" containerName="extract-content"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719357 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719364 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719374 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d597107-0497-411e-8b94-fa47d1ddd065" containerName="registry-server"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719383 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d597107-0497-411e-8b94-fa47d1ddd065" containerName="registry-server"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719393 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" containerName="extract-utilities"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719400 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" containerName="extract-utilities"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719411 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4896794-1b4d-4663-aa0b-18c59d5c8486" containerName="extract-utilities"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719418 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4896794-1b4d-4663-aa0b-18c59d5c8486" containerName="extract-utilities"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719428 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719435 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719445 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719453 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719462 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719469 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719478 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719485 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 03 19:33:43 crc kubenswrapper[4916]: E1203 19:33:43.719496 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d597107-0497-411e-8b94-fa47d1ddd065" containerName="extract-utilities"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719504 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d597107-0497-411e-8b94-fa47d1ddd065" containerName="extract-utilities"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719638 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719653 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f268348-698e-4764-8eb6-3ce43fdc4f9b" containerName="registry-server"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719664 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719677 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4896794-1b4d-4663-aa0b-18c59d5c8486" containerName="registry-server"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719688 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719698 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719708 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="b69fe1de-4b0b-4f4a-b1f8-db7be29f3067" containerName="oauth-openshift"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719720 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea30fa86-a8f4-4631-b664-06010167aea7" containerName="pruner"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719728 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="d46fd03d-fe64-42cf-9e43-aab1f8c7519f" containerName="registry-server"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719739 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d597107-0497-411e-8b94-fa47d1ddd065" containerName="registry-server"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719747 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.719968 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.721278 4916 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.721952 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.726784 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.750235 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.788188 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.788388 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.788524 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.788656 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.788761 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.788850 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.788950 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.789039 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890320 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890366 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890388 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890416 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890436 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890460 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890460 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890482 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890491 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890524 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890526 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890533 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890549 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890460 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890606 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.890713 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.909008 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.910148 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
containerID="38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4" exitCode=0 Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.910775 4916 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694" exitCode=0 Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.910782 4916 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73" exitCode=0 Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.910789 4916 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741" exitCode=2 Dec 03 19:33:43 crc kubenswrapper[4916]: I1203 19:33:43.910843 4916 scope.go:117] "RemoveContainer" containerID="3e54ebe9424cbe55524728e468c9f265bd0fe14820967752b5f65ae0a9b441e5" Dec 03 19:33:44 crc kubenswrapper[4916]: I1203 19:33:44.048249 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 19:33:44 crc kubenswrapper[4916]: W1203 19:33:44.066642 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-e2629381cb4aaa87be3036fedd355800383d6ef6dcbe9a54a9452a89844ace8b WatchSource:0}: Error finding container e2629381cb4aaa87be3036fedd355800383d6ef6dcbe9a54a9452a89844ace8b: Status 404 returned error can't find the container with id e2629381cb4aaa87be3036fedd355800383d6ef6dcbe9a54a9452a89844ace8b Dec 03 19:33:44 crc kubenswrapper[4916]: E1203 19:33:44.069713 4916 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.175:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187dcb8a59a5478c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 19:33:44.06903182 +0000 UTC m=+240.031842086,LastTimestamp:2025-12-03 19:33:44.06903182 +0000 UTC m=+240.031842086,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 19:33:44 crc kubenswrapper[4916]: E1203 19:33:44.156552 4916 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.175:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187dcb8a59a5478c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 19:33:44.06903182 +0000 UTC m=+240.031842086,LastTimestamp:2025-12-03 19:33:44.06903182 +0000 UTC m=+240.031842086,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 03 19:33:44 crc kubenswrapper[4916]: I1203 19:33:44.482896 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:44 crc kubenswrapper[4916]: I1203 19:33:44.921556 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1"} Dec 03 19:33:44 crc kubenswrapper[4916]: I1203 19:33:44.921639 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"e2629381cb4aaa87be3036fedd355800383d6ef6dcbe9a54a9452a89844ace8b"} Dec 03 19:33:44 crc kubenswrapper[4916]: I1203 19:33:44.922635 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:44 crc kubenswrapper[4916]: I1203 19:33:44.927293 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 19:33:44 crc kubenswrapper[4916]: I1203 19:33:44.938102 4916 generic.go:334] "Generic (PLEG): container finished" podID="7ca918bc-e4f8-485f-b091-092b90e45048" containerID="6ace1ac7a4e3aaf33c250d24a78ee08e5121378314099c94457accb1c712934a" exitCode=0 Dec 03 19:33:44 crc kubenswrapper[4916]: I1203 19:33:44.938162 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7ca918bc-e4f8-485f-b091-092b90e45048","Type":"ContainerDied","Data":"6ace1ac7a4e3aaf33c250d24a78ee08e5121378314099c94457accb1c712934a"} Dec 03 19:33:44 crc kubenswrapper[4916]: I1203 19:33:44.939247 4916 status_manager.go:851] "Failed to get status for pod" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:44 crc kubenswrapper[4916]: I1203 19:33:44.945238 4916 status_manager.go:851] 
"Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:45 crc kubenswrapper[4916]: E1203 19:33:45.529731 4916 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:45 crc kubenswrapper[4916]: E1203 19:33:45.530670 4916 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:45 crc kubenswrapper[4916]: E1203 19:33:45.531310 4916 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:45 crc kubenswrapper[4916]: E1203 19:33:45.532031 4916 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:45 crc kubenswrapper[4916]: E1203 19:33:45.532586 4916 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:45 crc kubenswrapper[4916]: I1203 19:33:45.532682 4916 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 03 19:33:45 crc kubenswrapper[4916]: E1203 19:33:45.533143 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="200ms" Dec 03 19:33:45 crc kubenswrapper[4916]: E1203 19:33:45.734938 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="400ms" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.119374 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.120834 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.121921 4916 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.122298 4916 status_manager.go:851] "Failed to get status for pod" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.122454 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:46 crc kubenswrapper[4916]: E1203 19:33:46.136723 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="800ms" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.207201 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.207554 4916 status_manager.go:851] "Failed to get status for pod" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.207741 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.207882 4916 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.218481 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7ca918bc-e4f8-485f-b091-092b90e45048-kube-api-access\") pod \"7ca918bc-e4f8-485f-b091-092b90e45048\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.218589 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-kubelet-dir\") pod \"7ca918bc-e4f8-485f-b091-092b90e45048\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.218635 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-var-lock\") pod \"7ca918bc-e4f8-485f-b091-092b90e45048\" (UID: \"7ca918bc-e4f8-485f-b091-092b90e45048\") " Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.218649 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.218683 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.218708 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.218721 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "7ca918bc-e4f8-485f-b091-092b90e45048" (UID: "7ca918bc-e4f8-485f-b091-092b90e45048"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.218785 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-var-lock" (OuterVolumeSpecName: "var-lock") pod "7ca918bc-e4f8-485f-b091-092b90e45048" (UID: "7ca918bc-e4f8-485f-b091-092b90e45048"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.218821 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.218832 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.218847 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). 
InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.219013 4916 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.219037 4916 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7ca918bc-e4f8-485f-b091-092b90e45048-var-lock\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.219055 4916 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.225769 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ca918bc-e4f8-485f-b091-092b90e45048-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "7ca918bc-e4f8-485f-b091-092b90e45048" (UID: "7ca918bc-e4f8-485f-b091-092b90e45048"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.320391 4916 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.320463 4916 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.320484 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7ca918bc-e4f8-485f-b091-092b90e45048-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.487833 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 03 19:33:46 crc kubenswrapper[4916]: E1203 19:33:46.937120 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="1.6s" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.953931 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.954482 4916 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd" exitCode=0 Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.954580 4916 scope.go:117] "RemoveContainer" containerID="38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.954616 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.955418 4916 status_manager.go:851] "Failed to get status for pod" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.955725 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.956038 4916 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.956452 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7ca918bc-e4f8-485f-b091-092b90e45048","Type":"ContainerDied","Data":"d8f34bbac60f9b11117734c97dabd0cd42189855a36b8690e8ed2908e4cd4f2e"} Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.956474 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8f34bbac60f9b11117734c97dabd0cd42189855a36b8690e8ed2908e4cd4f2e" Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.956505 4916 util.go:48] "No ready sandbox for pod can be found. 
Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.958840 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.959312 4916 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.959794 4916 status_manager.go:851] "Failed to get status for pod" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.961411 4916 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.961950 4916 status_manager.go:851] "Failed to get status for pod" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.962921 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.971503 4916 scope.go:117] "RemoveContainer" containerID="e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694"
Dec 03 19:33:46 crc kubenswrapper[4916]: I1203 19:33:46.985528 4916 scope.go:117] "RemoveContainer" containerID="3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.001627 4916 scope.go:117] "RemoveContainer" containerID="3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.017020 4916 scope.go:117] "RemoveContainer" containerID="d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.054080 4916 scope.go:117] "RemoveContainer" containerID="9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.077065 4916 scope.go:117] "RemoveContainer" containerID="38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4"
Dec 03 19:33:47 crc kubenswrapper[4916]: E1203 19:33:47.077461 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\": container with ID starting with 38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4 not found: ID does not exist" containerID="38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.077492 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4"} err="failed to get container status \"38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\": rpc error: code = NotFound desc = could not find container \"38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4\": container with ID starting with 38a712f997d60426690dba51864ba5a9c0226129327484553dafac0a59d1dea4 not found: ID does not exist"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.077519 4916 scope.go:117] "RemoveContainer" containerID="e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694"
Dec 03 19:33:47 crc kubenswrapper[4916]: E1203 19:33:47.078018 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\": container with ID starting with e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694 not found: ID does not exist" containerID="e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.078059 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694"} err="failed to get container status \"e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\": rpc error: code = NotFound desc = could not find container \"e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694\": container with ID starting with e3d6d8a976eaedf68fa6de5577bbd08c8886761bee90d403ddbcf5f3ec325694 not found: ID does not exist"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.078095 4916 scope.go:117] "RemoveContainer" containerID="3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73"
Dec 03 19:33:47 crc kubenswrapper[4916]: E1203 19:33:47.078400 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\": container with ID starting with 3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73 not found: ID does not exist" containerID="3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.078422 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73"} err="failed to get container status \"3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\": rpc error: code = NotFound desc = could not find container \"3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73\": container with ID starting with 3282be15d2d67d4891bc28b34436d347c28af010d5f19d5132802205eb158e73 not found: ID does not exist"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.078439 4916 scope.go:117] "RemoveContainer" containerID="3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741"
Dec 03 19:33:47 crc kubenswrapper[4916]: E1203 19:33:47.078700 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\": container with ID starting with 3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741 not found: ID does not exist" containerID="3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.078718 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741"} err="failed to get container status \"3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\": rpc error: code = NotFound desc = could not find container \"3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741\": container with ID starting with 3d07724fa9fb9f248aba1e9a4b36533ca47b7a5d1d0aad59f6f4fdc7c85db741 not found: ID does not exist"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.078731 4916 scope.go:117] "RemoveContainer" containerID="d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd"
Dec 03 19:33:47 crc kubenswrapper[4916]: E1203 19:33:47.078928 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\": container with ID starting with d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd not found: ID does not exist" containerID="d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.078955 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd"} err="failed to get container status \"d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\": rpc error: code = NotFound desc = could not find container \"d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd\": container with ID starting with d4619353da71d9ef9673dac00b01ff929cf4b9314b65a0e12d85f92a434c4fdd not found: ID does not exist"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.078973 4916 scope.go:117] "RemoveContainer" containerID="9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb"
Dec 03 19:33:47 crc kubenswrapper[4916]: E1203 19:33:47.079216 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\": container with ID starting with 9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb not found: ID does not exist" containerID="9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb"
Dec 03 19:33:47 crc kubenswrapper[4916]: I1203 19:33:47.079235 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb"} err="failed to get container status \"9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\": rpc error: code = NotFound desc = could not find container \"9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb\": container with ID starting with 9bf5bd27dbb1754881dfa1c278e07cd64982365b7c96460d3862afe87f07abcb not found: ID does not exist"
Dec 03 19:33:48 crc kubenswrapper[4916]: E1203 19:33:48.538730 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="3.2s"
Dec 03 19:33:51 crc kubenswrapper[4916]: E1203 19:33:51.740257 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="6.4s"
Dec 03 19:33:54 crc kubenswrapper[4916]: E1203 19:33:54.157865 4916 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.175:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187dcb8a59a5478c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 19:33:44.06903182 +0000 UTC m=+240.031842086,LastTimestamp:2025-12-03 19:33:44.06903182 +0000 UTC m=+240.031842086,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 03 19:33:54 crc kubenswrapper[4916]: I1203 19:33:54.481188 4916 status_manager.go:851] "Failed to get status for pod" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:54 crc kubenswrapper[4916]: I1203 19:33:54.482068 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.035854 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.036588 4916 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074" exitCode=1
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.036648 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074"}
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.037650 4916 scope.go:117] "RemoveContainer" containerID="6bdee4ac477464ce2c95ca370f229ecdb32a62a0480c22625643275d1e1ac074"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.038175 4916 status_manager.go:851] "Failed to get status for pod" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.039008 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.039915 4916 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.477291 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.478817 4916 status_manager.go:851] "Failed to get status for pod" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.479162 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.479623 4916 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.503338 4916 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.503397 4916 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47"
Dec 03 19:33:57 crc kubenswrapper[4916]: E1203 19:33:57.504169 4916 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:57 crc kubenswrapper[4916]: I1203 19:33:57.505227 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:57 crc kubenswrapper[4916]: W1203 19:33:57.531914 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-353ed4d28006caa82c16ff98ea48a4be7578249aaf8997e7a2d3f5fbbad03a23 WatchSource:0}: Error finding container 353ed4d28006caa82c16ff98ea48a4be7578249aaf8997e7a2d3f5fbbad03a23: Status 404 returned error can't find the container with id 353ed4d28006caa82c16ff98ea48a4be7578249aaf8997e7a2d3f5fbbad03a23
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.046750 4916 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="54a5f71358925b88179f7ca61a1dd0d319364729e880931e71a032f784def5ac" exitCode=0
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.046879 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"54a5f71358925b88179f7ca61a1dd0d319364729e880931e71a032f784def5ac"}
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.047391 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"353ed4d28006caa82c16ff98ea48a4be7578249aaf8997e7a2d3f5fbbad03a23"}
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.047980 4916 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47"
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.048009 4916 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47"
Dec 03 19:33:58 crc kubenswrapper[4916]: E1203 19:33:58.048649 4916 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.175:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.048779 4916 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.049383 4916 status_manager.go:851] "Failed to get status for pod" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.050229 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.054531 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.054643 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e38489eadce7522a75cec6ae37cc5e52f519ae36849e993cdd8db5600ef2202a"}
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.056268 4916 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.056948 4916 status_manager.go:851] "Failed to get status for pod" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:58 crc kubenswrapper[4916]: I1203 19:33:58.057285 4916 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.175:6443: connect: connection refused"
Dec 03 19:33:58 crc kubenswrapper[4916]: E1203 19:33:58.141601 4916 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.175:6443: connect: connection refused" interval="7s"
Dec 03 19:33:59 crc kubenswrapper[4916]: I1203 19:33:59.068913 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"74e75bd15a2eb495e0172b53c7868b8bb3c954b6dfddea4cd36db68ce595fd55"}
Dec 03 19:33:59 crc kubenswrapper[4916]: I1203 19:33:59.068971 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"48489ca5a931e41a9121f3b9a4fe424ec4252a70a1b535a74bdbd98b8cb058ea"}
Dec 03 19:33:59 crc kubenswrapper[4916]: I1203 19:33:59.068985 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"168386df864b6c7e9899c63b8b292e765e324bdb5764696f2d1d838bf67fea13"}
Dec 03 19:34:00 crc kubenswrapper[4916]: I1203 19:34:00.077633 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"05304db11035256ee87735aec0f00cbe183a74af5eff9f03f4057d3bac5096e8"}
Dec 03 19:34:00 crc kubenswrapper[4916]: I1203 19:34:00.077683 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a0a90244762d5a76fe1dbfd03a39179eef7b7f2bac71e4a47c41a283f5bbc91b"}
Dec 03 19:34:00 crc kubenswrapper[4916]: I1203 19:34:00.077825 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:34:00 crc kubenswrapper[4916]: I1203 19:34:00.077941 4916 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47"
Dec 03 19:34:00 crc kubenswrapper[4916]: I1203 19:34:00.077967 4916 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47"
Dec 03 19:34:02 crc kubenswrapper[4916]: I1203 19:34:02.506144 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:34:02 crc kubenswrapper[4916]: I1203 19:34:02.506436 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:34:02 crc kubenswrapper[4916]: I1203 19:34:02.514305 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:34:04 crc kubenswrapper[4916]: I1203 19:34:04.004973 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 19:34:04 crc kubenswrapper[4916]: I1203 19:34:04.692386 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 19:34:04 crc kubenswrapper[4916]: I1203 19:34:04.697458 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 19:34:05 crc kubenswrapper[4916]: I1203 19:34:05.095471 4916 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:34:05 crc kubenswrapper[4916]: I1203 19:34:05.210832 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="561a38c7-208c-409c-b755-2d9bfff8606a"
Dec 03 19:34:06 crc kubenswrapper[4916]: I1203 19:34:06.117201 4916 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47"
Dec 03 19:34:06 crc kubenswrapper[4916]: I1203 19:34:06.117595 4916 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47"
Dec 03 19:34:06 crc kubenswrapper[4916]: I1203 19:34:06.123205 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 19:34:06 crc kubenswrapper[4916]: I1203 19:34:06.124652 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13"
podUID="561a38c7-208c-409c-b755-2d9bfff8606a" Dec 03 19:34:07 crc kubenswrapper[4916]: I1203 19:34:07.122859 4916 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47" Dec 03 19:34:07 crc kubenswrapper[4916]: I1203 19:34:07.122888 4916 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47" Dec 03 19:34:07 crc kubenswrapper[4916]: I1203 19:34:07.128971 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="561a38c7-208c-409c-b755-2d9bfff8606a" Dec 03 19:34:14 crc kubenswrapper[4916]: I1203 19:34:14.011120 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 19:34:15 crc kubenswrapper[4916]: I1203 19:34:15.081970 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 19:34:15 crc kubenswrapper[4916]: I1203 19:34:15.990925 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 19:34:15 crc kubenswrapper[4916]: I1203 19:34:15.999652 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.029008 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.152396 4916 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.155515 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=33.155497446 podStartE2EDuration="33.155497446s" podCreationTimestamp="2025-12-03 19:33:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:34:05.11683448 +0000 UTC m=+261.079644766" watchObservedRunningTime="2025-12-03 19:34:16.155497446 +0000 UTC m=+272.118307722" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.157351 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.157399 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-c4f645f59-bt9fn","openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 19:34:16 crc kubenswrapper[4916]: E1203 19:34:16.157624 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" containerName="installer" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.157644 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" containerName="installer" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.157963 4916 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.158000 4916 mirror_client.go:130] "Deleting a mirror pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="2eb3523e-d42c-4762-ab12-8b5fdfeb3a47" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.158006 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ca918bc-e4f8-485f-b091-092b90e45048" containerName="installer" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.158616 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.164627 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.164938 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.164944 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.165089 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.165195 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.165253 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.165296 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.165537 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.165540 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.166264 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.171244 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.171280 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.171512 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.182862 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.183208 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.190030 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 03 19:34:16 crc 
kubenswrapper[4916]: I1203 19:34:16.197561 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=11.197536637 podStartE2EDuration="11.197536637s" podCreationTimestamp="2025-12-03 19:34:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:34:16.195162535 +0000 UTC m=+272.157972871" watchObservedRunningTime="2025-12-03 19:34:16.197536637 +0000 UTC m=+272.160346943" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348295 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-template-login\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348370 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dw8x6\" (UniqueName: \"kubernetes.io/projected/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-kube-api-access-dw8x6\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348421 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-session\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348469 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348510 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348553 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348629 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348678 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-service-ca\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348715 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348749 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-router-certs\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348788 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348821 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-template-error\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348861 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-audit-policies\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.348897 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-audit-dir\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.450535 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-audit-dir\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.450952 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-template-login\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.450704 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-audit-dir\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.450992 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dw8x6\" (UniqueName: \"kubernetes.io/projected/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-kube-api-access-dw8x6\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.451050 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-session\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.451087 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.451120 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.451154 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.451197 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.451246 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-service-ca\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.451284 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.451318 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-router-certs\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.451353 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.451384 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-template-error\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.451422 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-audit-policies\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.452517 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-audit-policies\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.453209 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.453699 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-service-ca\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.455865 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.457336 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.457466 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-template-login\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.457732 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.457963 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.458746 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-user-template-error\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.459030 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-router-certs\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.460102 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-session\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.462070 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.475466 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dw8x6\" (UniqueName: \"kubernetes.io/projected/cedd3c4f-90f7-4298-84e0-80cfb060aa9c-kube-api-access-dw8x6\") pod \"oauth-openshift-c4f645f59-bt9fn\" (UID: \"cedd3c4f-90f7-4298-84e0-80cfb060aa9c\") " pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.483476 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.490438 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.491422 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.590074 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 19:34:16 crc kubenswrapper[4916]: I1203 19:34:16.749239 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 19:34:17 crc kubenswrapper[4916]: I1203 19:34:17.096758 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 19:34:17 crc kubenswrapper[4916]: I1203 19:34:17.243609 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 19:34:17 crc kubenswrapper[4916]: I1203 19:34:17.287636 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 19:34:17 crc kubenswrapper[4916]: I1203 19:34:17.443681 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 03 19:34:17 crc kubenswrapper[4916]: I1203 19:34:17.465408 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 19:34:17 crc kubenswrapper[4916]: I1203 19:34:17.683870 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 19:34:17 crc kubenswrapper[4916]: I1203 19:34:17.703049 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 19:34:17 crc kubenswrapper[4916]: I1203 19:34:17.995613 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.127970 4916 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.141082 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.292694 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.333826 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.338855 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.362968 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.441887 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.442132 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.477800 4916 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.512780 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.576822 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.612692 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.676012 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.714724 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.778825 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.813488 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 19:34:18 crc kubenswrapper[4916]: I1203 19:34:18.825972 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.015181 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.043353 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.089903 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.154389 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.175174 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.197066 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.276181 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.317175 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.356639 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.441547 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 19:34:19 crc kubenswrapper[4916]: 
I1203 19:34:19.458073 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.459723 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.469132 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.642859 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.653321 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.655247 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.761200 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.793951 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.821554 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.863531 4916 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.906754 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.950912 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.962025 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.962196 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.973426 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 19:34:19 crc kubenswrapper[4916]: I1203 19:34:19.983715 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.111880 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.119225 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.159893 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.174344 4916 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.188679 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.228744 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.318045 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.347416 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.350338 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.373532 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.535278 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.560703 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.615827 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.649751 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.674428 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.755042 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.804978 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.937010 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.974182 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 19:34:20 crc kubenswrapper[4916]: I1203 19:34:20.974186 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.037369 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.062514 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.064437 4916 reflector.go:368] Caches populated for *v1.Secret from 
object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.078453 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.102023 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.205294 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.238802 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.242430 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.312139 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.335706 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.339494 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.346417 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.358325 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.403138 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.454447 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.491991 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.603577 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.680316 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.806838 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.832553 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.832863 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.932807 4916 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 03 19:34:21 crc kubenswrapper[4916]: I1203 19:34:21.945800 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.022968 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.062883 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.081531 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.137744 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.228382 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.253820 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.467620 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.634224 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.639360 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.647608 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.658919 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.668684 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.721024 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.791320 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.807162 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.911861 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.944295 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 03 19:34:22 crc kubenswrapper[4916]: I1203 19:34:22.972346 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.004148 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.019157 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.040252 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.111040 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.119074 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.142385 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.144040 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.179626 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.200407 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.252985 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.520941 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.691795 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.725025 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.947456 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Dec 03 19:34:23 crc kubenswrapper[4916]: I1203 19:34:23.976178 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.007683 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.016220 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.045384 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.047107 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.078553 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.165165 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.166133 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.229473 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.301948 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.351750 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.374362 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.417547 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.441273 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.489424 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.528056 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.603751 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.650380 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.681464 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.751516 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.754407 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.792335 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.817943 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.841681 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.898253 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 03 19:34:24 crc kubenswrapper[4916]: I1203 19:34:24.938476 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.036482 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.045719 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.077180 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.077230 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.104786 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.132095 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.133520 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.136349 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.147267 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.246143 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.347645 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.595901 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.676483 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.816438 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.827172 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.847784 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 03 19:34:25 crc kubenswrapper[4916]: I1203 19:34:25.949086 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.009324 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.186891 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.245226 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.280702 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.430089 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.501927 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.544182 4916 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.544551 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1" gracePeriod=5
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.570222 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.576110 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.603623 4916 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.740258 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.756733 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.841937 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.877972 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.929690 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Dec 03 19:34:26 crc kubenswrapper[4916]: I1203 19:34:26.944708 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.010631 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.182929 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.185852 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.237111 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.326207 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.356197 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.367028 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.451073 4916 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.454714 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.759780 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.802796 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.828214 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.891644 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.894260 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 03 19:34:27 crc kubenswrapper[4916]: I1203 19:34:27.913477 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.043368 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.068715 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.083482 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.085140 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.131478 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.316053 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.455094 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.512866 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.541214 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.593070 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.657791 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.709054 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.813223 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Dec 03 19:34:28 crc kubenswrapper[4916]: I1203 19:34:28.867493 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 03 19:34:29 crc kubenswrapper[4916]: I1203 19:34:29.151163 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 03 19:34:29 crc kubenswrapper[4916]: I1203 19:34:29.181383 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 03 19:34:29 crc kubenswrapper[4916]: I1203 19:34:29.304318 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 03 19:34:29 crc kubenswrapper[4916]: I1203 19:34:29.513146 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 03 19:34:29 crc kubenswrapper[4916]: I1203 19:34:29.615082 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 03 19:34:29 crc kubenswrapper[4916]: I1203 19:34:29.731535 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 03 19:34:29 crc kubenswrapper[4916]: I1203 19:34:29.864757 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 03 19:34:30 crc kubenswrapper[4916]: I1203 19:34:30.026218 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 03 19:34:30 crc kubenswrapper[4916]: I1203 19:34:30.035831 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 03 19:34:30 crc kubenswrapper[4916]: I1203 19:34:30.063558 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 03 19:34:30 crc kubenswrapper[4916]: I1203 19:34:30.370517 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-c4f645f59-bt9fn"]
Dec 03 19:34:30 crc kubenswrapper[4916]: I1203 19:34:30.409325 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 03 19:34:30 crc kubenswrapper[4916]: I1203 19:34:30.672867 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-c4f645f59-bt9fn"]
Dec 03 19:34:30 crc kubenswrapper[4916]: I1203 19:34:30.743213 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.273378 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" event={"ID":"cedd3c4f-90f7-4298-84e0-80cfb060aa9c","Type":"ContainerStarted","Data":"229c541ae57b184a76426e740f6abfd39d95b137af8733915b11e7417142d2c2"}
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.273935 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn"
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.274003 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" event={"ID":"cedd3c4f-90f7-4298-84e0-80cfb060aa9c","Type":"ContainerStarted","Data":"d5cd88879c902213334f81448d1013fb06eeb4a0143a255fcd1e7fa0d0de9ab9"}
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.297776 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn" podStartSLOduration=79.297758979 podStartE2EDuration="1m19.297758979s" podCreationTimestamp="2025-12-03 19:33:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:34:31.29628273 +0000 UTC m=+287.259093026" watchObservedRunningTime="2025-12-03 19:34:31.297758979 +0000 UTC m=+287.260569245"
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.352367 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.596631 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-c4f645f59-bt9fn"
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.654593 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.654659 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.723327 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.741603 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.741676 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.741716 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.741749 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.741807 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.741833 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.741919 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.741991 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.742495 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.742708 4916 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.742721 4916 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.742728 4916 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.742736 4916 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.753431 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.808144 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 03 19:34:31 crc kubenswrapper[4916]: I1203 19:34:31.843460 4916 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.282386 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.282478 4916 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1" exitCode=137
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.282665 4916 scope.go:117] "RemoveContainer" containerID="9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1"
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.282677 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.307860 4916 scope.go:117] "RemoveContainer" containerID="9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1"
Dec 03 19:34:32 crc kubenswrapper[4916]: E1203 19:34:32.308393 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1\": container with ID starting with 9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1 not found: ID does not exist" containerID="9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1"
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.308468 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1"} err="failed to get container status \"9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1\": rpc error: code = NotFound desc = could not find container \"9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1\": container with ID starting with 9b661a1f1f890c187acc389ee53081dc60a336e2bd431c239113bf5037f419a1 not found: ID does not exist"
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.491109 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.491660 4916 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID=""
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.504921 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.504973 4916 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="8680644e-234d-45d3-8de9-665aa5d02cb6"
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.509848 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 03 19:34:32 crc kubenswrapper[4916]: I1203 19:34:32.509886 4916 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="8680644e-234d-45d3-8de9-665aa5d02cb6"
Dec 03 19:34:44 crc kubenswrapper[4916]: I1203 19:34:44.224120 4916 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials
Dec 03 19:34:51 crc kubenswrapper[4916]: I1203 19:34:51.620614 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5w58b"]
Dec 03 19:34:51 crc kubenswrapper[4916]: I1203 19:34:51.621217 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" podUID="f8ba73c5-9167-4354-b1d2-896a40e52e1c" containerName="controller-manager" containerID="cri-o://96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc" gracePeriod=30
Dec 03 19:34:51 crc kubenswrapper[4916]: I1203 19:34:51.689066 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr"]
Dec 03 19:34:51 crc kubenswrapper[4916]: I1203 19:34:51.689282 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" podUID="8b9af469-c457-4818-a486-26fc2ca77b9a" containerName="route-controller-manager" containerID="cri-o://695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59" gracePeriod=30
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.011219 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b"
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.014873 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr"
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.165207 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b9af469-c457-4818-a486-26fc2ca77b9a-serving-cert\") pod \"8b9af469-c457-4818-a486-26fc2ca77b9a\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") "
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.165513 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-proxy-ca-bundles\") pod \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") "
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.165661 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsj5h\" (UniqueName: \"kubernetes.io/projected/8b9af469-c457-4818-a486-26fc2ca77b9a-kube-api-access-vsj5h\") pod \"8b9af469-c457-4818-a486-26fc2ca77b9a\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") "
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.166318 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-config\") pod \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") "
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.166444 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8ba73c5-9167-4354-b1d2-896a40e52e1c-serving-cert\") pod \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") "
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.166559 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pp4gs\" (UniqueName: \"kubernetes.io/projected/f8ba73c5-9167-4354-b1d2-896a40e52e1c-kube-api-access-pp4gs\") pod \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") "
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.166685 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-client-ca\") pod \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\" (UID: \"f8ba73c5-9167-4354-b1d2-896a40e52e1c\") "
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.166797 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-client-ca\") pod \"8b9af469-c457-4818-a486-26fc2ca77b9a\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") "
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.166845 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-config\") pod \"8b9af469-c457-4818-a486-26fc2ca77b9a\" (UID: \"8b9af469-c457-4818-a486-26fc2ca77b9a\") "
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.167009 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-config" (OuterVolumeSpecName: "config") pod "f8ba73c5-9167-4354-b1d2-896a40e52e1c" (UID: "f8ba73c5-9167-4354-b1d2-896a40e52e1c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.167060 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "f8ba73c5-9167-4354-b1d2-896a40e52e1c" (UID: "f8ba73c5-9167-4354-b1d2-896a40e52e1c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.167830 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-client-ca" (OuterVolumeSpecName: "client-ca") pod "f8ba73c5-9167-4354-b1d2-896a40e52e1c" (UID: "f8ba73c5-9167-4354-b1d2-896a40e52e1c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.168095 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-client-ca" (OuterVolumeSpecName: "client-ca") pod "8b9af469-c457-4818-a486-26fc2ca77b9a" (UID: "8b9af469-c457-4818-a486-26fc2ca77b9a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.168256 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-config" (OuterVolumeSpecName: "config") pod "8b9af469-c457-4818-a486-26fc2ca77b9a" (UID: "8b9af469-c457-4818-a486-26fc2ca77b9a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.172042 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ba73c5-9167-4354-b1d2-896a40e52e1c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f8ba73c5-9167-4354-b1d2-896a40e52e1c" (UID: "f8ba73c5-9167-4354-b1d2-896a40e52e1c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.172545 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b9af469-c457-4818-a486-26fc2ca77b9a-kube-api-access-vsj5h" (OuterVolumeSpecName: "kube-api-access-vsj5h") pod "8b9af469-c457-4818-a486-26fc2ca77b9a" (UID: "8b9af469-c457-4818-a486-26fc2ca77b9a"). InnerVolumeSpecName "kube-api-access-vsj5h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.172967 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9af469-c457-4818-a486-26fc2ca77b9a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8b9af469-c457-4818-a486-26fc2ca77b9a" (UID: "8b9af469-c457-4818-a486-26fc2ca77b9a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.173440 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ba73c5-9167-4354-b1d2-896a40e52e1c-kube-api-access-pp4gs" (OuterVolumeSpecName: "kube-api-access-pp4gs") pod "f8ba73c5-9167-4354-b1d2-896a40e52e1c" (UID: "f8ba73c5-9167-4354-b1d2-896a40e52e1c"). InnerVolumeSpecName "kube-api-access-pp4gs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.268290 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-config\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.268339 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8b9af469-c457-4818-a486-26fc2ca77b9a-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.268350 4916 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.268362 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-config\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.268371 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsj5h\" (UniqueName: \"kubernetes.io/projected/8b9af469-c457-4818-a486-26fc2ca77b9a-kube-api-access-vsj5h\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.268381 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f8ba73c5-9167-4354-b1d2-896a40e52e1c-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.268389 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pp4gs\" (UniqueName: \"kubernetes.io/projected/f8ba73c5-9167-4354-b1d2-896a40e52e1c-kube-api-access-pp4gs\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.268396 4916 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f8ba73c5-9167-4354-b1d2-896a40e52e1c-client-ca\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.268404 4916 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8b9af469-c457-4818-a486-26fc2ca77b9a-client-ca\") on node \"crc\" DevicePath \"\""
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.414514 4916 generic.go:334] "Generic (PLEG): container finished" podID="f8ba73c5-9167-4354-b1d2-896a40e52e1c" containerID="96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc" exitCode=0
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.414638 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" event={"ID":"f8ba73c5-9167-4354-b1d2-896a40e52e1c","Type":"ContainerDied","Data":"96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc"}
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.414725 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b" event={"ID":"f8ba73c5-9167-4354-b1d2-896a40e52e1c","Type":"ContainerDied","Data":"ccb6938a8bc125fe4d1ed78e4441fd2c69be00436484eb868548feffa33e1766"}
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.414758 4916 scope.go:117] "RemoveContainer" containerID="96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc"
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.414770 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-5w58b"
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.416736 4916 generic.go:334] "Generic (PLEG): container finished" podID="8b9af469-c457-4818-a486-26fc2ca77b9a" containerID="695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59" exitCode=0
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.416766 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" event={"ID":"8b9af469-c457-4818-a486-26fc2ca77b9a","Type":"ContainerDied","Data":"695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59"}
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.416790 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr" event={"ID":"8b9af469-c457-4818-a486-26fc2ca77b9a","Type":"ContainerDied","Data":"943c2390d0b4e417d4f4c7e8bcf3ae5bf28705708cff7cdb6c7263016fd6e7f7"}
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.416794 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr"
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.439261 4916 scope.go:117] "RemoveContainer" containerID="96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc"
Dec 03 19:34:52 crc kubenswrapper[4916]: E1203 19:34:52.440778 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc\": container with ID starting with 96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc not found: ID does not exist" containerID="96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc"
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.440804 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc"} err="failed to get container status \"96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc\": rpc error: code = NotFound desc = could not find container \"96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc\": container with ID starting with 96e4102e37793bb16ba3ae22b548a7eca37bade84877cc4cdaef6def1859c2dc not found: ID does not exist"
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.440825 4916 scope.go:117] "RemoveContainer" containerID="695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59"
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.442906 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr"]
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.451949 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-54gpr"]
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.467022 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5w58b"]
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.469625 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-5w58b"]
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.470355 4916 scope.go:117] "RemoveContainer" containerID="695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59"
Dec 03 19:34:52 crc kubenswrapper[4916]: E1203 19:34:52.471117 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59\": container with ID starting with 695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59 not found: ID does not exist" containerID="695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59"
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.471178 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59"} err="failed to get container status \"695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59\": rpc error: code = NotFound desc = could not find container \"695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59\": container with ID starting with 695b4f051aca6d3efa7435b6becc4111469ba84a3257933701eb5054ea5d9a59 not found: ID does not exist"
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.487828 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b9af469-c457-4818-a486-26fc2ca77b9a" path="/var/lib/kubelet/pods/8b9af469-c457-4818-a486-26fc2ca77b9a/volumes"
Dec 03 19:34:52 crc kubenswrapper[4916]: I1203 19:34:52.488351 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8ba73c5-9167-4354-b1d2-896a40e52e1c" path="/var/lib/kubelet/pods/f8ba73c5-9167-4354-b1d2-896a40e52e1c/volumes"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.069129 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-77cc845589-z92gf"]
Dec 03 19:34:53 crc kubenswrapper[4916]: E1203 19:34:53.069411 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.069422 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 03 19:34:53 crc kubenswrapper[4916]: E1203 19:34:53.069437 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b9af469-c457-4818-a486-26fc2ca77b9a" containerName="route-controller-manager"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.069444 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b9af469-c457-4818-a486-26fc2ca77b9a" containerName="route-controller-manager"
Dec 03 19:34:53 crc kubenswrapper[4916]: E1203 19:34:53.069458 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ba73c5-9167-4354-b1d2-896a40e52e1c" containerName="controller-manager"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.069466 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ba73c5-9167-4354-b1d2-896a40e52e1c" containerName="controller-manager"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.069576 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.069585 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ba73c5-9167-4354-b1d2-896a40e52e1c" containerName="controller-manager"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.069594 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b9af469-c457-4818-a486-26fc2ca77b9a" containerName="route-controller-manager"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.069976 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.071633 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.072822 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.072894 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.073664 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.073769 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.074020 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.077528 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"]
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.080363 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.083352 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-77cc845589-z92gf"]
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.086539 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"]
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.087638 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.087676 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.088243 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.088698 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.088816 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.088923 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.092221 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.180840 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-client-ca\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.181126 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/22d485bf-96e8-49cf-9c57-419bf939e73c-serving-cert\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.181165 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-config\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.181200 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjbq9\" (UniqueName: \"kubernetes.io/projected/77c8412e-f3e1-4ae3-bed4-53cbbf092011-kube-api-access-bjbq9\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.181299 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2njgf\" (UniqueName: \"kubernetes.io/projected/22d485bf-96e8-49cf-9c57-419bf939e73c-kube-api-access-2njgf\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.181332 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77c8412e-f3e1-4ae3-bed4-53cbbf092011-serving-cert\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.181445 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/22d485bf-96e8-49cf-9c57-419bf939e73c-client-ca\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.181631 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/22d485bf-96e8-49cf-9c57-419bf939e73c-proxy-ca-bundles\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.181770 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22d485bf-96e8-49cf-9c57-419bf939e73c-config\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.282986 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-client-ca\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.283060 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/22d485bf-96e8-49cf-9c57-419bf939e73c-serving-cert\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.283111 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-config\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.283142 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjbq9\" (UniqueName: \"kubernetes.io/projected/77c8412e-f3e1-4ae3-bed4-53cbbf092011-kube-api-access-bjbq9\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.283708 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2njgf\" (UniqueName: \"kubernetes.io/projected/22d485bf-96e8-49cf-9c57-419bf939e73c-kube-api-access-2njgf\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.283808 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77c8412e-f3e1-4ae3-bed4-53cbbf092011-serving-cert\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.284369 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/22d485bf-96e8-49cf-9c57-419bf939e73c-client-ca\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.284496 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/22d485bf-96e8-49cf-9c57-419bf939e73c-proxy-ca-bundles\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.284559 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22d485bf-96e8-49cf-9c57-419bf939e73c-config\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.285348 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-config\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.287212 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-client-ca\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.287469 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/22d485bf-96e8-49cf-9c57-419bf939e73c-proxy-ca-bundles\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.288961 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/22d485bf-96e8-49cf-9c57-419bf939e73c-serving-cert\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.289606 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/22d485bf-96e8-49cf-9c57-419bf939e73c-client-ca\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.300502 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22d485bf-96e8-49cf-9c57-419bf939e73c-config\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.311985 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2njgf\" (UniqueName: \"kubernetes.io/projected/22d485bf-96e8-49cf-9c57-419bf939e73c-kube-api-access-2njgf\") pod \"controller-manager-77cc845589-z92gf\" (UID: \"22d485bf-96e8-49cf-9c57-419bf939e73c\") " pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.314826 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77c8412e-f3e1-4ae3-bed4-53cbbf092011-serving-cert\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.321257 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjbq9\" (UniqueName: \"kubernetes.io/projected/77c8412e-f3e1-4ae3-bed4-53cbbf092011-kube-api-access-bjbq9\") pod \"route-controller-manager-866f46fcdc-8q4zs\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.387793 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.401454 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.686448 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"]
Dec 03 19:34:53 crc kubenswrapper[4916]: I1203 19:34:53.898839 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-77cc845589-z92gf"]
Dec 03 19:34:53 crc kubenswrapper[4916]: W1203 19:34:53.908819 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22d485bf_96e8_49cf_9c57_419bf939e73c.slice/crio-d792f24b84eebe08a0c18ddf7d54862eb1673a334cce55c548c859c511623690 WatchSource:0}: Error finding container d792f24b84eebe08a0c18ddf7d54862eb1673a334cce55c548c859c511623690: Status 404 returned error can't find the container with id d792f24b84eebe08a0c18ddf7d54862eb1673a334cce55c548c859c511623690
Dec 03 19:34:54 crc kubenswrapper[4916]: I1203 19:34:54.433223 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77cc845589-z92gf" event={"ID":"22d485bf-96e8-49cf-9c57-419bf939e73c","Type":"ContainerStarted","Data":"bff2cd20270257ff1e8a0187d8e677f53bfa3aefb0589c277cfddc772518107f"}
Dec 03 19:34:54 crc kubenswrapper[4916]: I1203 19:34:54.433544 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77cc845589-z92gf" event={"ID":"22d485bf-96e8-49cf-9c57-419bf939e73c","Type":"ContainerStarted","Data":"d792f24b84eebe08a0c18ddf7d54862eb1673a334cce55c548c859c511623690"}
Dec 03 19:34:54 crc kubenswrapper[4916]: I1203 19:34:54.433585 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-77cc845589-z92gf"
Dec 03 19:34:54 crc kubenswrapper[4916]: I1203 19:34:54.435222 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs" event={"ID":"77c8412e-f3e1-4ae3-bed4-53cbbf092011","Type":"ContainerStarted","Data":"78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b"}
Dec 03 19:34:54 crc kubenswrapper[4916]: I1203 19:34:54.435259 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs" event={"ID":"77c8412e-f3e1-4ae3-bed4-53cbbf092011","Type":"ContainerStarted","Data":"76df237b414aad1eb41ba3cdfdfb709ec9939857ece7d82d87fdfeaebd444ab9"} Dec 03 19:34:54 crc kubenswrapper[4916]: I1203 19:34:54.435456 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs" Dec 03 19:34:54 crc kubenswrapper[4916]: I1203 19:34:54.441166 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-77cc845589-z92gf" Dec 03 19:34:54 crc kubenswrapper[4916]: I1203 19:34:54.482713 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-77cc845589-z92gf" podStartSLOduration=3.482695976 podStartE2EDuration="3.482695976s" podCreationTimestamp="2025-12-03 19:34:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:34:54.477244394 +0000 UTC m=+310.440054660" watchObservedRunningTime="2025-12-03 19:34:54.482695976 +0000 UTC m=+310.445506242" Dec 03 19:34:54 crc kubenswrapper[4916]: I1203 19:34:54.505303 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs" Dec 03 19:34:54 crc kubenswrapper[4916]: I1203 19:34:54.536686 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs" podStartSLOduration=3.53666403 podStartE2EDuration="3.53666403s" podCreationTimestamp="2025-12-03 19:34:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:34:54.527767817 +0000 UTC m=+310.490578083" watchObservedRunningTime="2025-12-03 19:34:54.53666403 +0000 UTC m=+310.499474306" Dec 03 19:35:11 crc kubenswrapper[4916]: I1203 19:35:11.981717 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"] Dec 03 19:35:11 crc kubenswrapper[4916]: I1203 19:35:11.982665 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs" podUID="77c8412e-f3e1-4ae3-bed4-53cbbf092011" containerName="route-controller-manager" containerID="cri-o://78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b" gracePeriod=30 Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.448987 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.543283 4916 generic.go:334] "Generic (PLEG): container finished" podID="77c8412e-f3e1-4ae3-bed4-53cbbf092011" containerID="78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b" exitCode=0 Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.543348 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs" event={"ID":"77c8412e-f3e1-4ae3-bed4-53cbbf092011","Type":"ContainerDied","Data":"78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b"} Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.543380 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.543433 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs" event={"ID":"77c8412e-f3e1-4ae3-bed4-53cbbf092011","Type":"ContainerDied","Data":"76df237b414aad1eb41ba3cdfdfb709ec9939857ece7d82d87fdfeaebd444ab9"} Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.543466 4916 scope.go:117] "RemoveContainer" containerID="78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.545684 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77c8412e-f3e1-4ae3-bed4-53cbbf092011-serving-cert\") pod \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.545735 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-config\") pod \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.545787 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-client-ca\") pod \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.545835 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjbq9\" (UniqueName: \"kubernetes.io/projected/77c8412e-f3e1-4ae3-bed4-53cbbf092011-kube-api-access-bjbq9\") pod \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\" (UID: \"77c8412e-f3e1-4ae3-bed4-53cbbf092011\") " Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.549156 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-config" (OuterVolumeSpecName: "config") pod "77c8412e-f3e1-4ae3-bed4-53cbbf092011" (UID: "77c8412e-f3e1-4ae3-bed4-53cbbf092011"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.549263 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-client-ca" (OuterVolumeSpecName: "client-ca") pod "77c8412e-f3e1-4ae3-bed4-53cbbf092011" (UID: "77c8412e-f3e1-4ae3-bed4-53cbbf092011"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.555928 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77c8412e-f3e1-4ae3-bed4-53cbbf092011-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "77c8412e-f3e1-4ae3-bed4-53cbbf092011" (UID: "77c8412e-f3e1-4ae3-bed4-53cbbf092011"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.555966 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77c8412e-f3e1-4ae3-bed4-53cbbf092011-kube-api-access-bjbq9" (OuterVolumeSpecName: "kube-api-access-bjbq9") pod "77c8412e-f3e1-4ae3-bed4-53cbbf092011" (UID: "77c8412e-f3e1-4ae3-bed4-53cbbf092011"). InnerVolumeSpecName "kube-api-access-bjbq9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.565105 4916 scope.go:117] "RemoveContainer" containerID="78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b" Dec 03 19:35:12 crc kubenswrapper[4916]: E1203 19:35:12.565621 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b\": container with ID starting with 78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b not found: ID does not exist" containerID="78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.565682 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b"} err="failed to get container status \"78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b\": rpc error: code = NotFound desc = could not find container \"78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b\": container with ID starting with 78ad798f570142476b21c879a01cc4fda1e80c4fd058c497fbc94af04968a52b not found: ID does not exist" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.647790 4916 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77c8412e-f3e1-4ae3-bed4-53cbbf092011-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.647864 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.647874 4916 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/77c8412e-f3e1-4ae3-bed4-53cbbf092011-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.647883 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjbq9\" (UniqueName: 
\"kubernetes.io/projected/77c8412e-f3e1-4ae3-bed4-53cbbf092011-kube-api-access-bjbq9\") on node \"crc\" DevicePath \"\"" Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.873054 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"] Dec 03 19:35:12 crc kubenswrapper[4916]: I1203 19:35:12.877739 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-866f46fcdc-8q4zs"] Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.082852 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq"] Dec 03 19:35:13 crc kubenswrapper[4916]: E1203 19:35:13.083052 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77c8412e-f3e1-4ae3-bed4-53cbbf092011" containerName="route-controller-manager" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.083063 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="77c8412e-f3e1-4ae3-bed4-53cbbf092011" containerName="route-controller-manager" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.083159 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="77c8412e-f3e1-4ae3-bed4-53cbbf092011" containerName="route-controller-manager" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.083505 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.086354 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.086798 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.087474 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.087862 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.088317 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.089624 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.105673 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq"] Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.154336 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bvjt\" (UniqueName: \"kubernetes.io/projected/17a60f2b-dc81-4994-9cb7-d1e3426c2169-kube-api-access-8bvjt\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.154423 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/17a60f2b-dc81-4994-9cb7-d1e3426c2169-serving-cert\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.154449 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17a60f2b-dc81-4994-9cb7-d1e3426c2169-config\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.154476 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17a60f2b-dc81-4994-9cb7-d1e3426c2169-client-ca\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.255094 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bvjt\" (UniqueName: \"kubernetes.io/projected/17a60f2b-dc81-4994-9cb7-d1e3426c2169-kube-api-access-8bvjt\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.255189 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17a60f2b-dc81-4994-9cb7-d1e3426c2169-serving-cert\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.255212 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17a60f2b-dc81-4994-9cb7-d1e3426c2169-config\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.255232 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17a60f2b-dc81-4994-9cb7-d1e3426c2169-client-ca\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.256181 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17a60f2b-dc81-4994-9cb7-d1e3426c2169-client-ca\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.256588 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/17a60f2b-dc81-4994-9cb7-d1e3426c2169-config\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.260875 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17a60f2b-dc81-4994-9cb7-d1e3426c2169-serving-cert\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.285919 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bvjt\" (UniqueName: \"kubernetes.io/projected/17a60f2b-dc81-4994-9cb7-d1e3426c2169-kube-api-access-8bvjt\") pod \"route-controller-manager-668d44757-nb5wq\" (UID: \"17a60f2b-dc81-4994-9cb7-d1e3426c2169\") " pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.400187 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:13 crc kubenswrapper[4916]: I1203 19:35:13.895895 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq"] Dec 03 19:35:13 crc kubenswrapper[4916]: W1203 19:35:13.907997 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17a60f2b_dc81_4994_9cb7_d1e3426c2169.slice/crio-7dbc0aa5e1518123076b64e64082394c45a850f4b240b0e1b010530db9cf78e1 WatchSource:0}: Error finding container 7dbc0aa5e1518123076b64e64082394c45a850f4b240b0e1b010530db9cf78e1: Status 404 returned error can't find the container with id 7dbc0aa5e1518123076b64e64082394c45a850f4b240b0e1b010530db9cf78e1 Dec 03 19:35:14 crc kubenswrapper[4916]: I1203 19:35:14.488169 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77c8412e-f3e1-4ae3-bed4-53cbbf092011" path="/var/lib/kubelet/pods/77c8412e-f3e1-4ae3-bed4-53cbbf092011/volumes" Dec 03 19:35:14 crc kubenswrapper[4916]: I1203 19:35:14.560063 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" event={"ID":"17a60f2b-dc81-4994-9cb7-d1e3426c2169","Type":"ContainerStarted","Data":"4e42ae6a0d8596d0af61b73884cfcc6132a518f43060b734fd73471501770bf9"} Dec 03 19:35:14 crc kubenswrapper[4916]: I1203 19:35:14.560114 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" event={"ID":"17a60f2b-dc81-4994-9cb7-d1e3426c2169","Type":"ContainerStarted","Data":"7dbc0aa5e1518123076b64e64082394c45a850f4b240b0e1b010530db9cf78e1"} Dec 03 19:35:14 crc kubenswrapper[4916]: I1203 19:35:14.560495 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:14 crc kubenswrapper[4916]: I1203 19:35:14.569506 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" Dec 03 19:35:14 crc kubenswrapper[4916]: I1203 19:35:14.607064 4916 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-668d44757-nb5wq" podStartSLOduration=3.607041727 podStartE2EDuration="3.607041727s" podCreationTimestamp="2025-12-03 19:35:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:35:14.580204805 +0000 UTC m=+330.543015071" watchObservedRunningTime="2025-12-03 19:35:14.607041727 +0000 UTC m=+330.569852003" Dec 03 19:35:16 crc kubenswrapper[4916]: I1203 19:35:16.158738 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:35:16 crc kubenswrapper[4916]: I1203 19:35:16.158833 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.701331 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gmv88"] Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.703285 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.729965 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gmv88"] Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.816018 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cb73dbda-721b-4d9d-86d7-e1855ae9273f-registry-certificates\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.816077 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cb73dbda-721b-4d9d-86d7-e1855ae9273f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.816112 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cb73dbda-721b-4d9d-86d7-e1855ae9273f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.816148 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cb73dbda-721b-4d9d-86d7-e1855ae9273f-registry-tls\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.816170 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm8p5\" (UniqueName: \"kubernetes.io/projected/cb73dbda-721b-4d9d-86d7-e1855ae9273f-kube-api-access-qm8p5\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.816206 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.816243 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cb73dbda-721b-4d9d-86d7-e1855ae9273f-bound-sa-token\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.816270 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb73dbda-721b-4d9d-86d7-e1855ae9273f-trusted-ca\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.854155 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.917520 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cb73dbda-721b-4d9d-86d7-e1855ae9273f-registry-certificates\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.917615 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cb73dbda-721b-4d9d-86d7-e1855ae9273f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.917647 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cb73dbda-721b-4d9d-86d7-e1855ae9273f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc 
kubenswrapper[4916]: I1203 19:35:32.917683 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cb73dbda-721b-4d9d-86d7-e1855ae9273f-registry-tls\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.917705 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm8p5\" (UniqueName: \"kubernetes.io/projected/cb73dbda-721b-4d9d-86d7-e1855ae9273f-kube-api-access-qm8p5\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.917740 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cb73dbda-721b-4d9d-86d7-e1855ae9273f-bound-sa-token\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.917765 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb73dbda-721b-4d9d-86d7-e1855ae9273f-trusted-ca\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.919047 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb73dbda-721b-4d9d-86d7-e1855ae9273f-trusted-ca\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.919082 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cb73dbda-721b-4d9d-86d7-e1855ae9273f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.920033 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cb73dbda-721b-4d9d-86d7-e1855ae9273f-registry-certificates\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.929178 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cb73dbda-721b-4d9d-86d7-e1855ae9273f-registry-tls\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.929801 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cb73dbda-721b-4d9d-86d7-e1855ae9273f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gmv88\" (UID: 
\"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.942772 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm8p5\" (UniqueName: \"kubernetes.io/projected/cb73dbda-721b-4d9d-86d7-e1855ae9273f-kube-api-access-qm8p5\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:32 crc kubenswrapper[4916]: I1203 19:35:32.948422 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cb73dbda-721b-4d9d-86d7-e1855ae9273f-bound-sa-token\") pod \"image-registry-66df7c8f76-gmv88\" (UID: \"cb73dbda-721b-4d9d-86d7-e1855ae9273f\") " pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:33 crc kubenswrapper[4916]: I1203 19:35:33.030193 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:33 crc kubenswrapper[4916]: I1203 19:35:33.472296 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gmv88"] Dec 03 19:35:33 crc kubenswrapper[4916]: I1203 19:35:33.684072 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" event={"ID":"cb73dbda-721b-4d9d-86d7-e1855ae9273f","Type":"ContainerStarted","Data":"cd9f086a90b65ddfdb64c4221c59fdb94a9c4617253d6b62e8c83f6f871e4faf"} Dec 03 19:35:33 crc kubenswrapper[4916]: I1203 19:35:33.684118 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" event={"ID":"cb73dbda-721b-4d9d-86d7-e1855ae9273f","Type":"ContainerStarted","Data":"85a9a8d7799618a211327f6bce58766f7920ea6778b246e04eec707503e30078"} Dec 03 19:35:33 crc kubenswrapper[4916]: I1203 19:35:33.684413 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" Dec 03 19:35:33 crc kubenswrapper[4916]: I1203 19:35:33.714141 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-gmv88" podStartSLOduration=1.714120603 podStartE2EDuration="1.714120603s" podCreationTimestamp="2025-12-03 19:35:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:35:33.710387956 +0000 UTC m=+349.673198222" watchObservedRunningTime="2025-12-03 19:35:33.714120603 +0000 UTC m=+349.676930869" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.159033 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.159535 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 
19:35:46.523351 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r4lq2"] Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.523654 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-r4lq2" podUID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" containerName="registry-server" containerID="cri-o://a66b2170fa84a792a5940f775b39985b2543623f633d7683ddbf7dcf6f26679a" gracePeriod=30 Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.543983 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pvd7g"] Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.544205 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pvd7g" podUID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" containerName="registry-server" containerID="cri-o://2eaa495cc93cf41eed15c410ffd15cd17f6af02dbec6bd3c5540ee2c7b498ae0" gracePeriod=30 Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.551622 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w86mh"] Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.551928 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" podUID="ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c" containerName="marketplace-operator" containerID="cri-o://5d3191276f07e1bfdd5cfb302237b14bf400246ac78007d6cd08968951944787" gracePeriod=30 Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.555149 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h8kd6"] Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.560688 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-h8kd6" podUID="ac59f68d-2f62-419c-9244-ea9ee95242f8" containerName="registry-server" containerID="cri-o://171e51e86dc4083082b5bd0083ebba551d7bddc455a13319370d0305733ea061" gracePeriod=30 Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.567995 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n2hmt"] Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.568244 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n2hmt" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerName="registry-server" containerID="cri-o://7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c" gracePeriod=30 Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.580685 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rpvxq"] Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.581507 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.597320 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rpvxq"] Dec 03 19:35:46 crc kubenswrapper[4916]: E1203 19:35:46.625726 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c is running failed: container process not found" containerID="7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 19:35:46 crc kubenswrapper[4916]: E1203 19:35:46.626180 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c is running failed: container process not found" containerID="7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 19:35:46 crc kubenswrapper[4916]: E1203 19:35:46.626606 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c is running failed: container process not found" containerID="7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c" cmd=["grpc_health_probe","-addr=:50051"] Dec 03 19:35:46 crc kubenswrapper[4916]: E1203 19:35:46.626643 4916 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-n2hmt" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerName="registry-server" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.714960 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvrgv\" (UniqueName: \"kubernetes.io/projected/23722fcc-7804-4705-9180-4dbd53a7e0e9-kube-api-access-mvrgv\") pod \"marketplace-operator-79b997595-rpvxq\" (UID: \"23722fcc-7804-4705-9180-4dbd53a7e0e9\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.715073 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/23722fcc-7804-4705-9180-4dbd53a7e0e9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rpvxq\" (UID: \"23722fcc-7804-4705-9180-4dbd53a7e0e9\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.715108 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/23722fcc-7804-4705-9180-4dbd53a7e0e9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rpvxq\" (UID: \"23722fcc-7804-4705-9180-4dbd53a7e0e9\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.788493 4916 generic.go:334] "Generic (PLEG): 
container finished" podID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerID="7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c" exitCode=0 Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.788586 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n2hmt" event={"ID":"4e7df06a-1d77-40e3-916c-581b46b747eb","Type":"ContainerDied","Data":"7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c"} Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.789952 4916 generic.go:334] "Generic (PLEG): container finished" podID="ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c" containerID="5d3191276f07e1bfdd5cfb302237b14bf400246ac78007d6cd08968951944787" exitCode=0 Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.789996 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" event={"ID":"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c","Type":"ContainerDied","Data":"5d3191276f07e1bfdd5cfb302237b14bf400246ac78007d6cd08968951944787"} Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.792171 4916 generic.go:334] "Generic (PLEG): container finished" podID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" containerID="2eaa495cc93cf41eed15c410ffd15cd17f6af02dbec6bd3c5540ee2c7b498ae0" exitCode=0 Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.792213 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvd7g" event={"ID":"0551f9a0-8ac5-4b28-bf49-b507428e6b05","Type":"ContainerDied","Data":"2eaa495cc93cf41eed15c410ffd15cd17f6af02dbec6bd3c5540ee2c7b498ae0"} Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.793824 4916 generic.go:334] "Generic (PLEG): container finished" podID="ac59f68d-2f62-419c-9244-ea9ee95242f8" containerID="171e51e86dc4083082b5bd0083ebba551d7bddc455a13319370d0305733ea061" exitCode=0 Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.793866 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h8kd6" event={"ID":"ac59f68d-2f62-419c-9244-ea9ee95242f8","Type":"ContainerDied","Data":"171e51e86dc4083082b5bd0083ebba551d7bddc455a13319370d0305733ea061"} Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.795778 4916 generic.go:334] "Generic (PLEG): container finished" podID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" containerID="a66b2170fa84a792a5940f775b39985b2543623f633d7683ddbf7dcf6f26679a" exitCode=0 Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.795807 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4lq2" event={"ID":"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe","Type":"ContainerDied","Data":"a66b2170fa84a792a5940f775b39985b2543623f633d7683ddbf7dcf6f26679a"} Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.816198 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/23722fcc-7804-4705-9180-4dbd53a7e0e9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rpvxq\" (UID: \"23722fcc-7804-4705-9180-4dbd53a7e0e9\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.816267 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvrgv\" (UniqueName: \"kubernetes.io/projected/23722fcc-7804-4705-9180-4dbd53a7e0e9-kube-api-access-mvrgv\") pod \"marketplace-operator-79b997595-rpvxq\" (UID: 
\"23722fcc-7804-4705-9180-4dbd53a7e0e9\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.816313 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/23722fcc-7804-4705-9180-4dbd53a7e0e9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rpvxq\" (UID: \"23722fcc-7804-4705-9180-4dbd53a7e0e9\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.820336 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/23722fcc-7804-4705-9180-4dbd53a7e0e9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rpvxq\" (UID: \"23722fcc-7804-4705-9180-4dbd53a7e0e9\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.824170 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/23722fcc-7804-4705-9180-4dbd53a7e0e9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rpvxq\" (UID: \"23722fcc-7804-4705-9180-4dbd53a7e0e9\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.836180 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvrgv\" (UniqueName: \"kubernetes.io/projected/23722fcc-7804-4705-9180-4dbd53a7e0e9-kube-api-access-mvrgv\") pod \"marketplace-operator-79b997595-rpvxq\" (UID: \"23722fcc-7804-4705-9180-4dbd53a7e0e9\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.906794 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" Dec 03 19:35:46 crc kubenswrapper[4916]: I1203 19:35:46.994433 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r4lq2" Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.054042 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.054260 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h8kd6" Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.058616 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n2hmt" Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.067036 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pvd7g"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.119323 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-catalog-content\") pod \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.119382 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-utilities\") pod \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.119452 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ptx2\" (UniqueName: \"kubernetes.io/projected/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-kube-api-access-5ptx2\") pod \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\" (UID: \"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.121259 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-utilities" (OuterVolumeSpecName: "utilities") pod "83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" (UID: "83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.123201 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-kube-api-access-5ptx2" (OuterVolumeSpecName: "kube-api-access-5ptx2") pod "83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" (UID: "83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe"). InnerVolumeSpecName "kube-api-access-5ptx2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.189275 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" (UID: "83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.220702 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-catalog-content\") pod \"4e7df06a-1d77-40e3-916c-581b46b747eb\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.220793 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8rp4\" (UniqueName: \"kubernetes.io/projected/0551f9a0-8ac5-4b28-bf49-b507428e6b05-kube-api-access-l8rp4\") pod \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.220835 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-catalog-content\") pod \"ac59f68d-2f62-419c-9244-ea9ee95242f8\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.220853 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v562m\" (UniqueName: \"kubernetes.io/projected/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-kube-api-access-v562m\") pod \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.220886 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-trusted-ca\") pod \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.220924 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mshqj\" (UniqueName: \"kubernetes.io/projected/ac59f68d-2f62-419c-9244-ea9ee95242f8-kube-api-access-mshqj\") pod \"ac59f68d-2f62-419c-9244-ea9ee95242f8\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.220954 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-operator-metrics\") pod \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\" (UID: \"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.221000 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-utilities\") pod \"ac59f68d-2f62-419c-9244-ea9ee95242f8\" (UID: \"ac59f68d-2f62-419c-9244-ea9ee95242f8\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.221018 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-utilities\") pod \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.221036 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rstx6\" (UniqueName: \"kubernetes.io/projected/4e7df06a-1d77-40e3-916c-581b46b747eb-kube-api-access-rstx6\") pod \"4e7df06a-1d77-40e3-916c-581b46b747eb\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.221057 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-catalog-content\") pod \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\" (UID: \"0551f9a0-8ac5-4b28-bf49-b507428e6b05\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.221084 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-utilities\") pod \"4e7df06a-1d77-40e3-916c-581b46b747eb\" (UID: \"4e7df06a-1d77-40e3-916c-581b46b747eb\") "
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.221394 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.221410 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.221420 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ptx2\" (UniqueName: \"kubernetes.io/projected/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe-kube-api-access-5ptx2\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.221751 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c" (UID: "ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.222236 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-utilities" (OuterVolumeSpecName: "utilities") pod "ac59f68d-2f62-419c-9244-ea9ee95242f8" (UID: "ac59f68d-2f62-419c-9244-ea9ee95242f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.222271 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-utilities" (OuterVolumeSpecName: "utilities") pod "0551f9a0-8ac5-4b28-bf49-b507428e6b05" (UID: "0551f9a0-8ac5-4b28-bf49-b507428e6b05"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.223562 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0551f9a0-8ac5-4b28-bf49-b507428e6b05-kube-api-access-l8rp4" (OuterVolumeSpecName: "kube-api-access-l8rp4") pod "0551f9a0-8ac5-4b28-bf49-b507428e6b05" (UID: "0551f9a0-8ac5-4b28-bf49-b507428e6b05"). InnerVolumeSpecName "kube-api-access-l8rp4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.223693 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-kube-api-access-v562m" (OuterVolumeSpecName: "kube-api-access-v562m") pod "ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c" (UID: "ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c"). InnerVolumeSpecName "kube-api-access-v562m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.224460 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e7df06a-1d77-40e3-916c-581b46b747eb-kube-api-access-rstx6" (OuterVolumeSpecName: "kube-api-access-rstx6") pod "4e7df06a-1d77-40e3-916c-581b46b747eb" (UID: "4e7df06a-1d77-40e3-916c-581b46b747eb"). InnerVolumeSpecName "kube-api-access-rstx6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.224628 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c" (UID: "ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.225321 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac59f68d-2f62-419c-9244-ea9ee95242f8-kube-api-access-mshqj" (OuterVolumeSpecName: "kube-api-access-mshqj") pod "ac59f68d-2f62-419c-9244-ea9ee95242f8" (UID: "ac59f68d-2f62-419c-9244-ea9ee95242f8"). InnerVolumeSpecName "kube-api-access-mshqj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.228114 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-utilities" (OuterVolumeSpecName: "utilities") pod "4e7df06a-1d77-40e3-916c-581b46b747eb" (UID: "4e7df06a-1d77-40e3-916c-581b46b747eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.247532 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac59f68d-2f62-419c-9244-ea9ee95242f8" (UID: "ac59f68d-2f62-419c-9244-ea9ee95242f8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.273104 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0551f9a0-8ac5-4b28-bf49-b507428e6b05" (UID: "0551f9a0-8ac5-4b28-bf49-b507428e6b05"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.322110 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mshqj\" (UniqueName: \"kubernetes.io/projected/ac59f68d-2f62-419c-9244-ea9ee95242f8-kube-api-access-mshqj\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.322168 4916 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.322179 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.322189 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.322198 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rstx6\" (UniqueName: \"kubernetes.io/projected/4e7df06a-1d77-40e3-916c-581b46b747eb-kube-api-access-rstx6\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.322206 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0551f9a0-8ac5-4b28-bf49-b507428e6b05-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.322215 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.322223 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8rp4\" (UniqueName: \"kubernetes.io/projected/0551f9a0-8ac5-4b28-bf49-b507428e6b05-kube-api-access-l8rp4\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.322231 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac59f68d-2f62-419c-9244-ea9ee95242f8-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.322239 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v562m\" (UniqueName: \"kubernetes.io/projected/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-kube-api-access-v562m\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.322247 4916 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.327049 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4e7df06a-1d77-40e3-916c-581b46b747eb" (UID: "4e7df06a-1d77-40e3-916c-581b46b747eb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.349902 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rpvxq"]
Dec 03 19:35:47 crc kubenswrapper[4916]: W1203 19:35:47.355825 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23722fcc_7804_4705_9180_4dbd53a7e0e9.slice/crio-82fb10b19c34b210b76e03512290a20a878ebc14fa2c9f1e36e71baf0c197686 WatchSource:0}: Error finding container 82fb10b19c34b210b76e03512290a20a878ebc14fa2c9f1e36e71baf0c197686: Status 404 returned error can't find the container with id 82fb10b19c34b210b76e03512290a20a878ebc14fa2c9f1e36e71baf0c197686
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.423323 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e7df06a-1d77-40e3-916c-581b46b747eb-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.802749 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4lq2" event={"ID":"83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe","Type":"ContainerDied","Data":"1e030ea98f92cf6e44712d1f4398afd0757835cd341c491d8a14a13afb7d9c55"}
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.803100 4916 scope.go:117] "RemoveContainer" containerID="a66b2170fa84a792a5940f775b39985b2543623f633d7683ddbf7dcf6f26679a"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.803238 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r4lq2"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.806343 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n2hmt" event={"ID":"4e7df06a-1d77-40e3-916c-581b46b747eb","Type":"ContainerDied","Data":"03425f39f8e308665b4fc62a5d9e5e2f52ff7f14181843c2ac095d3a09d9d002"}
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.806356 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n2hmt"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.807980 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.810619 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w86mh" event={"ID":"ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c","Type":"ContainerDied","Data":"dfca687e34d310659babf8ccabbbd7afe1656a9487815dcb9ac6f3e9be67cd1d"}
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.812520 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pvd7g" event={"ID":"0551f9a0-8ac5-4b28-bf49-b507428e6b05","Type":"ContainerDied","Data":"dcb9c011eac2ff4f5753423e044753f4f2f2e075e39378f6fed7615a74d3fb27"}
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.812597 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pvd7g"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.820512 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h8kd6" event={"ID":"ac59f68d-2f62-419c-9244-ea9ee95242f8","Type":"ContainerDied","Data":"32bb40770fc2c133476353d68167708995c682da63c540a09aa941bcc9d83460"}
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.820633 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h8kd6"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.824180 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" event={"ID":"23722fcc-7804-4705-9180-4dbd53a7e0e9","Type":"ContainerStarted","Data":"3eb1709b998a1e50437e2b9bab43eea508275963a9c772ccb652a0e2380d7370"}
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.824212 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" event={"ID":"23722fcc-7804-4705-9180-4dbd53a7e0e9","Type":"ContainerStarted","Data":"82fb10b19c34b210b76e03512290a20a878ebc14fa2c9f1e36e71baf0c197686"}
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.824547 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.831627 4916 scope.go:117] "RemoveContainer" containerID="3b5d7f957e99b462249f18907b8abfdf06a3507dc9f4ad7d2481210fd16a9887"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.845895 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.852338 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-rpvxq" podStartSLOduration=1.852320583 podStartE2EDuration="1.852320583s" podCreationTimestamp="2025-12-03 19:35:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:35:47.848132054 +0000 UTC m=+363.810942340" watchObservedRunningTime="2025-12-03 19:35:47.852320583 +0000 UTC m=+363.815130849"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.868893 4916 scope.go:117] "RemoveContainer" containerID="acc63faaaeabde4a8abd03def0004f85ad5b0cf1b7edf047b0ac6328643cc1c1"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.869614 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pvd7g"]
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.881274 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pvd7g"]
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.887478 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r4lq2"]
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.893901 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-r4lq2"]
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.905330 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w86mh"]
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.905816 4916 scope.go:117] "RemoveContainer" containerID="7bd22fe66f6fad12d2c74144bc37d436fa0cde7a108d84d650fde9fcddfb7c6c"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.909640 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w86mh"]
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.913694 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n2hmt"]
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.928493 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n2hmt"]
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.933004 4916 scope.go:117] "RemoveContainer" containerID="7006a8bce0318bcf64f7c7c7dabde054121e639e0fb0bf2a47a7bda0fc9af0f9"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.965389 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h8kd6"]
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.967998 4916 scope.go:117] "RemoveContainer" containerID="4248ddf150c4aa6c2dfba229d45580d48245292c249b26e2868955912c345251"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.970332 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-h8kd6"]
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.980919 4916 scope.go:117] "RemoveContainer" containerID="5d3191276f07e1bfdd5cfb302237b14bf400246ac78007d6cd08968951944787"
Dec 03 19:35:47 crc kubenswrapper[4916]: I1203 19:35:47.991952 4916 scope.go:117] "RemoveContainer" containerID="2eaa495cc93cf41eed15c410ffd15cd17f6af02dbec6bd3c5540ee2c7b498ae0"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.007624 4916 scope.go:117] "RemoveContainer" containerID="639162285b4fbaa5ffdb85d76bf427ac8b5a98f3eb69a0ec6f7b15b2f3f588f5"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.024550 4916 scope.go:117] "RemoveContainer" containerID="7cdeaabd72c78450e053c4a902ee80dcc6fbd3b900d8a795f1cb5f84426fe646"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.036690 4916 scope.go:117] "RemoveContainer" containerID="171e51e86dc4083082b5bd0083ebba551d7bddc455a13319370d0305733ea061"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.046521 4916 scope.go:117] "RemoveContainer" containerID="1dd27cf7911a1ea172b8f3d72d049fbde892519b24d52153137b8591909844d3"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.057139 4916 scope.go:117] "RemoveContainer" containerID="0062423d42bac3e43599598c3863e500a7d9c40be5a512428982869a20d48004"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.490506 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" path="/var/lib/kubelet/pods/0551f9a0-8ac5-4b28-bf49-b507428e6b05/volumes"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.494004 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb" path="/var/lib/kubelet/pods/4e7df06a-1d77-40e3-916c-581b46b747eb/volumes"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.495268 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" path="/var/lib/kubelet/pods/83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe/volumes"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.497721 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac59f68d-2f62-419c-9244-ea9ee95242f8" path="/var/lib/kubelet/pods/ac59f68d-2f62-419c-9244-ea9ee95242f8/volumes"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.499224 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c" path="/var/lib/kubelet/pods/ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c/volumes"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.738877 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w94d6"]
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739368 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" containerName="extract-content"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739380 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" containerName="extract-content"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739389 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c" containerName="marketplace-operator"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739395 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c" containerName="marketplace-operator"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739403 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac59f68d-2f62-419c-9244-ea9ee95242f8" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739409 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac59f68d-2f62-419c-9244-ea9ee95242f8" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739417 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739423 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739429 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739434 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739442 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" containerName="extract-content"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739447 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" containerName="extract-content"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739457 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739464 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739472 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerName="extract-content"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739477 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerName="extract-content"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739486 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerName="extract-utilities"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739492 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerName="extract-utilities"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739499 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" containerName="extract-utilities"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739505 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" containerName="extract-utilities"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739512 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac59f68d-2f62-419c-9244-ea9ee95242f8" containerName="extract-utilities"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739518 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac59f68d-2f62-419c-9244-ea9ee95242f8" containerName="extract-utilities"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739525 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" containerName="extract-utilities"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739531 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" containerName="extract-utilities"
Dec 03 19:35:48 crc kubenswrapper[4916]: E1203 19:35:48.739539 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac59f68d-2f62-419c-9244-ea9ee95242f8" containerName="extract-content"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739544 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac59f68d-2f62-419c-9244-ea9ee95242f8" containerName="extract-content"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739645 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce6deb9f-2ba1-4efc-96ec-ebd3437b9b5c" containerName="marketplace-operator"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739655 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="0551f9a0-8ac5-4b28-bf49-b507428e6b05" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739664 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac59f68d-2f62-419c-9244-ea9ee95242f8" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739672 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e7df06a-1d77-40e3-916c-581b46b747eb" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.739680 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="83b8e59d-6c7f-4ee7-836b-ae7c6ca444fe" containerName="registry-server"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.741056 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.747132 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.755550 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w94d6"]
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.839012 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b2ecd5f-6381-4928-a111-7e17927c6096-catalog-content\") pod \"redhat-marketplace-w94d6\" (UID: \"3b2ecd5f-6381-4928-a111-7e17927c6096\") " pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.839047 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b2ecd5f-6381-4928-a111-7e17927c6096-utilities\") pod \"redhat-marketplace-w94d6\" (UID: \"3b2ecd5f-6381-4928-a111-7e17927c6096\") " pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.839074 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2ps7\" (UniqueName: \"kubernetes.io/projected/3b2ecd5f-6381-4928-a111-7e17927c6096-kube-api-access-l2ps7\") pod \"redhat-marketplace-w94d6\" (UID: \"3b2ecd5f-6381-4928-a111-7e17927c6096\") " pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.938031 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rrq2j"]
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.939037 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.939905 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2ps7\" (UniqueName: \"kubernetes.io/projected/3b2ecd5f-6381-4928-a111-7e17927c6096-kube-api-access-l2ps7\") pod \"redhat-marketplace-w94d6\" (UID: \"3b2ecd5f-6381-4928-a111-7e17927c6096\") " pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.940016 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b2ecd5f-6381-4928-a111-7e17927c6096-catalog-content\") pod \"redhat-marketplace-w94d6\" (UID: \"3b2ecd5f-6381-4928-a111-7e17927c6096\") " pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.940043 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b2ecd5f-6381-4928-a111-7e17927c6096-utilities\") pod \"redhat-marketplace-w94d6\" (UID: \"3b2ecd5f-6381-4928-a111-7e17927c6096\") " pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.941359 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b2ecd5f-6381-4928-a111-7e17927c6096-catalog-content\") pod \"redhat-marketplace-w94d6\" (UID: \"3b2ecd5f-6381-4928-a111-7e17927c6096\") " pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.941413 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b2ecd5f-6381-4928-a111-7e17927c6096-utilities\") pod \"redhat-marketplace-w94d6\" (UID: \"3b2ecd5f-6381-4928-a111-7e17927c6096\") " pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.941508 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.948183 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rrq2j"]
Dec 03 19:35:48 crc kubenswrapper[4916]: I1203 19:35:48.967430 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2ps7\" (UniqueName: \"kubernetes.io/projected/3b2ecd5f-6381-4928-a111-7e17927c6096-kube-api-access-l2ps7\") pod \"redhat-marketplace-w94d6\" (UID: \"3b2ecd5f-6381-4928-a111-7e17927c6096\") " pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.041348 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqgh2\" (UniqueName: \"kubernetes.io/projected/a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73-kube-api-access-hqgh2\") pod \"redhat-operators-rrq2j\" (UID: \"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73\") " pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.041404 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73-utilities\") pod \"redhat-operators-rrq2j\" (UID: \"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73\") " pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.041468 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73-catalog-content\") pod \"redhat-operators-rrq2j\" (UID: \"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73\") " pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.112808 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.143142 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73-catalog-content\") pod \"redhat-operators-rrq2j\" (UID: \"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73\") " pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.143195 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqgh2\" (UniqueName: \"kubernetes.io/projected/a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73-kube-api-access-hqgh2\") pod \"redhat-operators-rrq2j\" (UID: \"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73\") " pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.143229 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73-utilities\") pod \"redhat-operators-rrq2j\" (UID: \"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73\") " pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.143849 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73-catalog-content\") pod \"redhat-operators-rrq2j\" (UID: \"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73\") " pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.145687 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73-utilities\") pod \"redhat-operators-rrq2j\" (UID: \"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73\") " pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.159313 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqgh2\" (UniqueName: \"kubernetes.io/projected/a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73-kube-api-access-hqgh2\") pod \"redhat-operators-rrq2j\" (UID: \"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73\") " pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.265893 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.507257 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w94d6"]
Dec 03 19:35:49 crc kubenswrapper[4916]: W1203 19:35:49.513765 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b2ecd5f_6381_4928_a111_7e17927c6096.slice/crio-ad82d53763a2ffb0563b032a54a55c4b92c229fa8eac10d8aa9e6bfeda09a00c WatchSource:0}: Error finding container ad82d53763a2ffb0563b032a54a55c4b92c229fa8eac10d8aa9e6bfeda09a00c: Status 404 returned error can't find the container with id ad82d53763a2ffb0563b032a54a55c4b92c229fa8eac10d8aa9e6bfeda09a00c
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.644615 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rrq2j"]
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.877208 4916 generic.go:334] "Generic (PLEG): container finished" podID="a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73" containerID="12695b478ff21ec5f94416124f24c7ade4e6aa43e8db90b17d4182ef525f539f" exitCode=0
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.877296 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrq2j" event={"ID":"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73","Type":"ContainerDied","Data":"12695b478ff21ec5f94416124f24c7ade4e6aa43e8db90b17d4182ef525f539f"}
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.877322 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrq2j" event={"ID":"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73","Type":"ContainerStarted","Data":"078fecc7ef6dcc1924930a8f0e61bf848d0c55beeb7d3a7309a55fdafc21dc66"}
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.880055 4916 generic.go:334] "Generic (PLEG): container finished" podID="3b2ecd5f-6381-4928-a111-7e17927c6096" containerID="64dee903ba0d0f2b5c8d3b7dd122207ad53f5f9a00d3c927847186159cf0add7" exitCode=0
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.880407 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w94d6" event={"ID":"3b2ecd5f-6381-4928-a111-7e17927c6096","Type":"ContainerDied","Data":"64dee903ba0d0f2b5c8d3b7dd122207ad53f5f9a00d3c927847186159cf0add7"}
Dec 03 19:35:49 crc kubenswrapper[4916]: I1203 19:35:49.880426 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w94d6" event={"ID":"3b2ecd5f-6381-4928-a111-7e17927c6096","Type":"ContainerStarted","Data":"ad82d53763a2ffb0563b032a54a55c4b92c229fa8eac10d8aa9e6bfeda09a00c"}
Dec 03 19:35:50 crc kubenswrapper[4916]: I1203 19:35:50.888452 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrq2j" event={"ID":"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73","Type":"ContainerStarted","Data":"479bc5a07b9c98e67227fbbd243769491a2a54dd899a690a413929e04adad779"}
Dec 03 19:35:50 crc kubenswrapper[4916]: I1203 19:35:50.892075 4916 generic.go:334] "Generic (PLEG): container finished" podID="3b2ecd5f-6381-4928-a111-7e17927c6096" containerID="0899a7dc809669c2bb447c1a3570aeb0ff17d7d4e3b7ea922f00a35cbfeb1be5" exitCode=0
Dec 03 19:35:50 crc kubenswrapper[4916]: I1203 19:35:50.892142 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w94d6" event={"ID":"3b2ecd5f-6381-4928-a111-7e17927c6096","Type":"ContainerDied","Data":"0899a7dc809669c2bb447c1a3570aeb0ff17d7d4e3b7ea922f00a35cbfeb1be5"}
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.152814 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h82r9"]
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.154846 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.157914 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.158905 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h82r9"]
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.297272 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/148fa9af-6094-448d-9c20-267ce0e3b04f-utilities\") pod \"certified-operators-h82r9\" (UID: \"148fa9af-6094-448d-9c20-267ce0e3b04f\") " pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.297312 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/148fa9af-6094-448d-9c20-267ce0e3b04f-catalog-content\") pod \"certified-operators-h82r9\" (UID: \"148fa9af-6094-448d-9c20-267ce0e3b04f\") " pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.297390 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqg5m\" (UniqueName: \"kubernetes.io/projected/148fa9af-6094-448d-9c20-267ce0e3b04f-kube-api-access-nqg5m\") pod \"certified-operators-h82r9\" (UID: \"148fa9af-6094-448d-9c20-267ce0e3b04f\") " pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.348053 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gwmvs"]
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.349293 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.352598 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.363495 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gwmvs"]
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.398115 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzggh\" (UniqueName: \"kubernetes.io/projected/074f71e8-1f93-48a6-9777-4d6450cd4989-kube-api-access-jzggh\") pod \"community-operators-gwmvs\" (UID: \"074f71e8-1f93-48a6-9777-4d6450cd4989\") " pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.398421 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/148fa9af-6094-448d-9c20-267ce0e3b04f-utilities\") pod \"certified-operators-h82r9\" (UID: \"148fa9af-6094-448d-9c20-267ce0e3b04f\") " pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.398447 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/148fa9af-6094-448d-9c20-267ce0e3b04f-catalog-content\") pod \"certified-operators-h82r9\" (UID: \"148fa9af-6094-448d-9c20-267ce0e3b04f\") " pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.398498 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/074f71e8-1f93-48a6-9777-4d6450cd4989-catalog-content\") pod \"community-operators-gwmvs\" (UID: \"074f71e8-1f93-48a6-9777-4d6450cd4989\") " pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.398519 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/074f71e8-1f93-48a6-9777-4d6450cd4989-utilities\") pod \"community-operators-gwmvs\" (UID: \"074f71e8-1f93-48a6-9777-4d6450cd4989\") " pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.398551 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqg5m\" (UniqueName: \"kubernetes.io/projected/148fa9af-6094-448d-9c20-267ce0e3b04f-kube-api-access-nqg5m\") pod \"certified-operators-h82r9\" (UID: \"148fa9af-6094-448d-9c20-267ce0e3b04f\") " pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.398965 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/148fa9af-6094-448d-9c20-267ce0e3b04f-catalog-content\") pod \"certified-operators-h82r9\" (UID: \"148fa9af-6094-448d-9c20-267ce0e3b04f\") " pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.399235 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/148fa9af-6094-448d-9c20-267ce0e3b04f-utilities\") pod \"certified-operators-h82r9\" (UID: \"148fa9af-6094-448d-9c20-267ce0e3b04f\") " pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.421305 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqg5m\" (UniqueName: \"kubernetes.io/projected/148fa9af-6094-448d-9c20-267ce0e3b04f-kube-api-access-nqg5m\") pod \"certified-operators-h82r9\" (UID: \"148fa9af-6094-448d-9c20-267ce0e3b04f\") " pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.499341 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/074f71e8-1f93-48a6-9777-4d6450cd4989-catalog-content\") pod \"community-operators-gwmvs\" (UID: \"074f71e8-1f93-48a6-9777-4d6450cd4989\") " pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.499730 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/074f71e8-1f93-48a6-9777-4d6450cd4989-catalog-content\") pod \"community-operators-gwmvs\" (UID: \"074f71e8-1f93-48a6-9777-4d6450cd4989\") " pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.499838 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/074f71e8-1f93-48a6-9777-4d6450cd4989-utilities\") pod \"community-operators-gwmvs\" (UID: \"074f71e8-1f93-48a6-9777-4d6450cd4989\") " pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.500076 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/074f71e8-1f93-48a6-9777-4d6450cd4989-utilities\") pod \"community-operators-gwmvs\" (UID: \"074f71e8-1f93-48a6-9777-4d6450cd4989\") " pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.500134 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzggh\" (UniqueName: \"kubernetes.io/projected/074f71e8-1f93-48a6-9777-4d6450cd4989-kube-api-access-jzggh\") pod \"community-operators-gwmvs\" (UID: \"074f71e8-1f93-48a6-9777-4d6450cd4989\") " pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.515238 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzggh\" (UniqueName: \"kubernetes.io/projected/074f71e8-1f93-48a6-9777-4d6450cd4989-kube-api-access-jzggh\") pod \"community-operators-gwmvs\" (UID: \"074f71e8-1f93-48a6-9777-4d6450cd4989\") " pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.633499 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.680062 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.842763 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h82r9"]
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.899812 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h82r9" event={"ID":"148fa9af-6094-448d-9c20-267ce0e3b04f","Type":"ContainerStarted","Data":"dbd16be3f7122aa379f49ccea536ec8641f6876775c580113f8ca3bbc53fcfee"}
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.905773 4916 generic.go:334] "Generic (PLEG): container finished" podID="a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73" containerID="479bc5a07b9c98e67227fbbd243769491a2a54dd899a690a413929e04adad779" exitCode=0
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.905819 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrq2j" event={"ID":"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73","Type":"ContainerDied","Data":"479bc5a07b9c98e67227fbbd243769491a2a54dd899a690a413929e04adad779"}
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.907956 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w94d6" event={"ID":"3b2ecd5f-6381-4928-a111-7e17927c6096","Type":"ContainerStarted","Data":"7114a77cc59936513bb237163bd17be74dc519c0e61bc3398a2618c30c52a1c7"}
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.920900 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gwmvs"]
Dec 03 19:35:51 crc kubenswrapper[4916]: I1203 19:35:51.951368 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-w94d6" podStartSLOduration=2.570026318 podStartE2EDuration="3.951348499s" podCreationTimestamp="2025-12-03 19:35:48 +0000 UTC" firstStartedPulling="2025-12-03 19:35:49.881116674 +0000 UTC m=+365.843926940" lastFinishedPulling="2025-12-03 19:35:51.262438845 +0000 UTC m=+367.225249121" observedRunningTime="2025-12-03 19:35:51.94797549 +0000 UTC m=+367.910785756" watchObservedRunningTime="2025-12-03 19:35:51.951348499 +0000 UTC m=+367.914158765"
Dec 03 19:35:52 crc kubenswrapper[4916]: I1203 19:35:52.916754 4916 generic.go:334] "Generic (PLEG): container finished" podID="148fa9af-6094-448d-9c20-267ce0e3b04f" containerID="9bdbee9c1e7818f3a5ec952c8f9dda532bde16c3508dd4202bbf0cdc961ca30f" exitCode=0
Dec 03 19:35:52 crc kubenswrapper[4916]: I1203 19:35:52.916947 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h82r9" event={"ID":"148fa9af-6094-448d-9c20-267ce0e3b04f","Type":"ContainerDied","Data":"9bdbee9c1e7818f3a5ec952c8f9dda532bde16c3508dd4202bbf0cdc961ca30f"}
Dec 03 19:35:52 crc kubenswrapper[4916]: I1203 19:35:52.930634 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rrq2j" event={"ID":"a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73","Type":"ContainerStarted","Data":"99aa823ef1dbee737eb3e4622ced20b24a67a2ba848040ff29d3db4ea1669efe"}
Dec 03 19:35:52 crc kubenswrapper[4916]: I1203 19:35:52.933782 4916 generic.go:334] "Generic (PLEG): container finished" podID="074f71e8-1f93-48a6-9777-4d6450cd4989" containerID="1cba5f355a40a131a40c6e9d3820fed9d52e8822147d683e2730db8dfe19cba7" exitCode=0
Dec 03 19:35:52 crc kubenswrapper[4916]: I1203 19:35:52.940745 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gwmvs" event={"ID":"074f71e8-1f93-48a6-9777-4d6450cd4989","Type":"ContainerDied","Data":"1cba5f355a40a131a40c6e9d3820fed9d52e8822147d683e2730db8dfe19cba7"}
Dec 03 19:35:52 crc kubenswrapper[4916]: I1203 19:35:52.940797 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gwmvs" event={"ID":"074f71e8-1f93-48a6-9777-4d6450cd4989","Type":"ContainerStarted","Data":"e55ec8a107f5b473ae5da6e92a9768e3c4433723cf9f4680bdb1f232b452be46"}
Dec 03 19:35:52 crc kubenswrapper[4916]: I1203 19:35:52.979353 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rrq2j" podStartSLOduration=2.350015959 podStartE2EDuration="4.979331435s" podCreationTimestamp="2025-12-03 19:35:48 +0000 UTC" firstStartedPulling="2025-12-03 19:35:49.878688261 +0000 UTC m=+365.841498527" lastFinishedPulling="2025-12-03 19:35:52.508003737 +0000 UTC m=+368.470814003" observedRunningTime="2025-12-03 19:35:52.976248474 +0000 UTC m=+368.939058740" watchObservedRunningTime="2025-12-03 19:35:52.979331435 +0000 UTC m=+368.942141721"
Dec 03 19:35:53 crc kubenswrapper[4916]: I1203 19:35:53.035650 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-gmv88"
Dec 03 19:35:53 crc kubenswrapper[4916]: I1203 19:35:53.095355 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwgl6"]
Dec 03 19:35:53 crc kubenswrapper[4916]: I1203 19:35:53.940809 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h82r9" event={"ID":"148fa9af-6094-448d-9c20-267ce0e3b04f","Type":"ContainerStarted","Data":"e5404d01b8debc0ce7b95168f72a23741a48c18262a58913cd2fa16173c1fcc1"}
Dec 03 19:35:53 crc kubenswrapper[4916]: I1203 19:35:53.943302 4916 generic.go:334] "Generic (PLEG): container finished" podID="074f71e8-1f93-48a6-9777-4d6450cd4989" containerID="8acbe6f65499c81a6c786db02a65013bfa350692cdcc76fe411c1e53e615ac82" exitCode=0
Dec 03 19:35:53 crc kubenswrapper[4916]: I1203 19:35:53.943418 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gwmvs" event={"ID":"074f71e8-1f93-48a6-9777-4d6450cd4989","Type":"ContainerDied","Data":"8acbe6f65499c81a6c786db02a65013bfa350692cdcc76fe411c1e53e615ac82"}
Dec 03 19:35:54 crc kubenswrapper[4916]: I1203 19:35:54.949582 4916 generic.go:334] "Generic (PLEG): container finished" podID="148fa9af-6094-448d-9c20-267ce0e3b04f" containerID="e5404d01b8debc0ce7b95168f72a23741a48c18262a58913cd2fa16173c1fcc1" exitCode=0
Dec 03 19:35:54 crc kubenswrapper[4916]: I1203 19:35:54.949623 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h82r9" event={"ID":"148fa9af-6094-448d-9c20-267ce0e3b04f","Type":"ContainerDied","Data":"e5404d01b8debc0ce7b95168f72a23741a48c18262a58913cd2fa16173c1fcc1"}
Dec 03 19:35:54 crc kubenswrapper[4916]: I1203 19:35:54.961990 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gwmvs" event={"ID":"074f71e8-1f93-48a6-9777-4d6450cd4989","Type":"ContainerStarted","Data":"e1c501649187446bc1ed549009bd0d30d799937a3108106be15d5bc06133e69f"}
Dec 03 19:35:54 crc kubenswrapper[4916]: I1203 19:35:54.994635 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gwmvs" podStartSLOduration=2.502496457 podStartE2EDuration="3.994621104s" podCreationTimestamp="2025-12-03 19:35:51 +0000 UTC" firstStartedPulling="2025-12-03 19:35:52.942502052 +0000 UTC m=+368.905312328" lastFinishedPulling="2025-12-03 19:35:54.434626709 +0000 UTC m=+370.397436975" observedRunningTime="2025-12-03 19:35:54.991060951 +0000 UTC m=+370.953871217" watchObservedRunningTime="2025-12-03 19:35:54.994621104 +0000 UTC m=+370.957431370"
Dec 03 19:35:56 crc kubenswrapper[4916]: I1203 19:35:56.973703 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h82r9" event={"ID":"148fa9af-6094-448d-9c20-267ce0e3b04f","Type":"ContainerStarted","Data":"f3dd30b5992f47d4ec85fe767e4afab2071d74fcdb1df2223d23dec2c8c96e4e"}
Dec 03 19:35:56 crc kubenswrapper[4916]: I1203 19:35:56.996051 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h82r9" podStartSLOduration=3.550198779 podStartE2EDuration="5.99603247s" podCreationTimestamp="2025-12-03 19:35:51 +0000 UTC" firstStartedPulling="2025-12-03 19:35:52.919580583 +0000 UTC m=+368.882390849" lastFinishedPulling="2025-12-03 19:35:55.365414264 +0000 UTC m=+371.328224540" observedRunningTime="2025-12-03 19:35:56.992871417 +0000 UTC m=+372.955681723" watchObservedRunningTime="2025-12-03 19:35:56.99603247 +0000 UTC m=+372.958842746"
Dec 03 19:35:59 crc kubenswrapper[4916]: I1203 19:35:59.113838 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:59 crc kubenswrapper[4916]: I1203 19:35:59.114210 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:59 crc kubenswrapper[4916]: I1203 19:35:59.188095 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:35:59 crc kubenswrapper[4916]: I1203 19:35:59.266381 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:59 crc kubenswrapper[4916]: I1203 19:35:59.266431 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:35:59 crc kubenswrapper[4916]: I1203 19:35:59.329277 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:36:00 crc kubenswrapper[4916]: I1203 19:36:00.037630 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w94d6"
Dec 03 19:36:00 crc kubenswrapper[4916]: I1203 19:36:00.054454 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rrq2j"
Dec 03 19:36:01 crc kubenswrapper[4916]: I1203 19:36:01.634061 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:36:01 crc kubenswrapper[4916]: I1203 19:36:01.634338 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:36:01 crc kubenswrapper[4916]: I1203 19:36:01.673051 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:36:01 crc kubenswrapper[4916]: I1203 19:36:01.680251 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:36:01 crc kubenswrapper[4916]: I1203 19:36:01.680307 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:36:01 crc kubenswrapper[4916]: I1203 19:36:01.723042 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:36:02 crc kubenswrapper[4916]: I1203 19:36:02.040835 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h82r9"
Dec 03 19:36:02 crc kubenswrapper[4916]: I1203 19:36:02.041288 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gwmvs"
Dec 03 19:36:16 crc kubenswrapper[4916]: I1203 19:36:16.159132 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 19:36:16 crc kubenswrapper[4916]: I1203 19:36:16.159814 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 19:36:16 crc kubenswrapper[4916]: I1203 19:36:16.159868 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms"
Dec 03 19:36:16 crc kubenswrapper[4916]: I1203 19:36:16.160511 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f565473c58fe0f0bf1244c6738ae4337c346a70b2f59f37b60836c87e6c33bc3"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 19:36:16 crc kubenswrapper[4916]: I1203 19:36:16.160605 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://f565473c58fe0f0bf1244c6738ae4337c346a70b2f59f37b60836c87e6c33bc3" gracePeriod=600
Dec 03 19:36:17 crc kubenswrapper[4916]: I1203 19:36:17.092740 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="f565473c58fe0f0bf1244c6738ae4337c346a70b2f59f37b60836c87e6c33bc3" exitCode=0
Dec 03 19:36:17 crc kubenswrapper[4916]: I1203 19:36:17.092829 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"f565473c58fe0f0bf1244c6738ae4337c346a70b2f59f37b60836c87e6c33bc3"}
Dec 03 19:36:17 crc kubenswrapper[4916]: I1203 19:36:17.093651 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"659dd16e0d19a34a54acf070e9f83f7ea8206f5ca21d579f1ce0b13af7969996"}
Dec 03 19:36:17 crc kubenswrapper[4916]: I1203 19:36:17.093700 4916 scope.go:117] "RemoveContainer" containerID="739f67286328c95d6cb221a1c643be5e550768cb4556a61df2be30a364326216"
Dec 03 19:36:18 crc kubenswrapper[4916]: I1203 19:36:18.168301 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" podUID="151b79c1-f797-460a-9883-5af28efabd61" containerName="registry" containerID="cri-o://1e2d71c0197031482325678860ea120bc2517ee8f33bca7f93022ba00eddf1d3" gracePeriod=30
Dec 03 19:36:19 crc kubenswrapper[4916]: I1203 19:36:19.112503 4916 generic.go:334] "Generic (PLEG): container finished" podID="151b79c1-f797-460a-9883-5af28efabd61" containerID="1e2d71c0197031482325678860ea120bc2517ee8f33bca7f93022ba00eddf1d3" exitCode=0
Dec 03 19:36:19 crc kubenswrapper[4916]: I1203 19:36:19.112637 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" event={"ID":"151b79c1-f797-460a-9883-5af28efabd61","Type":"ContainerDied","Data":"1e2d71c0197031482325678860ea120bc2517ee8f33bca7f93022ba00eddf1d3"}
Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.011444 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6"
Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.121761 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-bound-sa-token\") pod \"151b79c1-f797-460a-9883-5af28efabd61\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") "
Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.121838 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/151b79c1-f797-460a-9883-5af28efabd61-installation-pull-secrets\") pod \"151b79c1-f797-460a-9883-5af28efabd61\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") "
Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.121866 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhkvf\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-kube-api-access-rhkvf\") pod \"151b79c1-f797-460a-9883-5af28efabd61\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") "
Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.121912 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-registry-tls\") pod \"151b79c1-f797-460a-9883-5af28efabd61\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") "
Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.121938 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-registry-certificates\") pod \"151b79c1-f797-460a-9883-5af28efabd61\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") "
Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.122231 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName:
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"151b79c1-f797-460a-9883-5af28efabd61\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.122255 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/151b79c1-f797-460a-9883-5af28efabd61-ca-trust-extracted\") pod \"151b79c1-f797-460a-9883-5af28efabd61\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.122276 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-trusted-ca\") pod \"151b79c1-f797-460a-9883-5af28efabd61\" (UID: \"151b79c1-f797-460a-9883-5af28efabd61\") " Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.124668 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "151b79c1-f797-460a-9883-5af28efabd61" (UID: "151b79c1-f797-460a-9883-5af28efabd61"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.125716 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "151b79c1-f797-460a-9883-5af28efabd61" (UID: "151b79c1-f797-460a-9883-5af28efabd61"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.128727 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.128755 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xwgl6" event={"ID":"151b79c1-f797-460a-9883-5af28efabd61","Type":"ContainerDied","Data":"d6fb74c385eaacb67dfddc57c9a9f16b40590ecde16360bf16f03cbc20d8df68"} Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.130336 4916 scope.go:117] "RemoveContainer" containerID="1e2d71c0197031482325678860ea120bc2517ee8f33bca7f93022ba00eddf1d3" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.132337 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-kube-api-access-rhkvf" (OuterVolumeSpecName: "kube-api-access-rhkvf") pod "151b79c1-f797-460a-9883-5af28efabd61" (UID: "151b79c1-f797-460a-9883-5af28efabd61"). InnerVolumeSpecName "kube-api-access-rhkvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.134501 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/151b79c1-f797-460a-9883-5af28efabd61-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "151b79c1-f797-460a-9883-5af28efabd61" (UID: "151b79c1-f797-460a-9883-5af28efabd61"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.136862 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "151b79c1-f797-460a-9883-5af28efabd61" (UID: "151b79c1-f797-460a-9883-5af28efabd61"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.139091 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "151b79c1-f797-460a-9883-5af28efabd61" (UID: "151b79c1-f797-460a-9883-5af28efabd61"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.144777 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "151b79c1-f797-460a-9883-5af28efabd61" (UID: "151b79c1-f797-460a-9883-5af28efabd61"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.150528 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/151b79c1-f797-460a-9883-5af28efabd61-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "151b79c1-f797-460a-9883-5af28efabd61" (UID: "151b79c1-f797-460a-9883-5af28efabd61"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.224054 4916 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.224127 4916 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.224147 4916 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/151b79c1-f797-460a-9883-5af28efabd61-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.224190 4916 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/151b79c1-f797-460a-9883-5af28efabd61-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.224206 4916 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.224218 4916 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/151b79c1-f797-460a-9883-5af28efabd61-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.224230 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhkvf\" (UniqueName: \"kubernetes.io/projected/151b79c1-f797-460a-9883-5af28efabd61-kube-api-access-rhkvf\") on node \"crc\" DevicePath \"\"" Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.495246 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwgl6"] Dec 03 19:36:20 crc kubenswrapper[4916]: I1203 19:36:20.495848 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwgl6"] Dec 03 19:36:22 crc kubenswrapper[4916]: I1203 19:36:22.487329 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="151b79c1-f797-460a-9883-5af28efabd61" path="/var/lib/kubelet/pods/151b79c1-f797-460a-9883-5af28efabd61/volumes" Dec 03 19:38:16 crc kubenswrapper[4916]: I1203 19:38:16.159150 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:38:16 crc kubenswrapper[4916]: I1203 19:38:16.159922 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:38:46 crc kubenswrapper[4916]: I1203 19:38:46.158745 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness 
probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:38:46 crc kubenswrapper[4916]: I1203 19:38:46.159606 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:39:16 crc kubenswrapper[4916]: I1203 19:39:16.159170 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:39:16 crc kubenswrapper[4916]: I1203 19:39:16.159846 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:39:16 crc kubenswrapper[4916]: I1203 19:39:16.159907 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:39:16 crc kubenswrapper[4916]: I1203 19:39:16.160813 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"659dd16e0d19a34a54acf070e9f83f7ea8206f5ca21d579f1ce0b13af7969996"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 19:39:16 crc kubenswrapper[4916]: I1203 19:39:16.160983 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://659dd16e0d19a34a54acf070e9f83f7ea8206f5ca21d579f1ce0b13af7969996" gracePeriod=600 Dec 03 19:39:16 crc kubenswrapper[4916]: I1203 19:39:16.355886 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="659dd16e0d19a34a54acf070e9f83f7ea8206f5ca21d579f1ce0b13af7969996" exitCode=0 Dec 03 19:39:16 crc kubenswrapper[4916]: I1203 19:39:16.355954 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"659dd16e0d19a34a54acf070e9f83f7ea8206f5ca21d579f1ce0b13af7969996"} Dec 03 19:39:16 crc kubenswrapper[4916]: I1203 19:39:16.356014 4916 scope.go:117] "RemoveContainer" containerID="f565473c58fe0f0bf1244c6738ae4337c346a70b2f59f37b60836c87e6c33bc3" Dec 03 19:39:17 crc kubenswrapper[4916]: I1203 19:39:17.367821 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"02ae566c7ff459b62724fc48986cab4ba376415af729ca4442e9a81a3e43827b"} Dec 03 19:41:16 crc kubenswrapper[4916]: I1203 19:41:16.159082 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:41:16 crc kubenswrapper[4916]: I1203 19:41:16.159884 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.450927 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-pz52m"] Dec 03 19:41:42 crc kubenswrapper[4916]: E1203 19:41:42.451848 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="151b79c1-f797-460a-9883-5af28efabd61" containerName="registry" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.451892 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="151b79c1-f797-460a-9883-5af28efabd61" containerName="registry" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.452016 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="151b79c1-f797-460a-9883-5af28efabd61" containerName="registry" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.452549 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-pz52m" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.457725 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.458004 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-x7n5p"] Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.458736 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.459017 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-x7n5p" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.459242 4916 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-hxj76" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.461213 4916 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-mnb44" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.463548 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-pz52m"] Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.468153 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-gvbbc"] Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.469243 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-gvbbc" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.470626 4916 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-kvwg5" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.518370 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-x7n5p"] Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.526670 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-gvbbc"] Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.599163 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pfmh\" (UniqueName: \"kubernetes.io/projected/6e2a44e6-d6b3-4b89-a243-7a9f66b36e18-kube-api-access-9pfmh\") pod \"cert-manager-cainjector-7f985d654d-pz52m\" (UID: \"6e2a44e6-d6b3-4b89-a243-7a9f66b36e18\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-pz52m" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.599220 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-685gv\" (UniqueName: \"kubernetes.io/projected/8ac2850b-bf3b-4c3c-a1d1-8e59ce302246-kube-api-access-685gv\") pod \"cert-manager-webhook-5655c58dd6-gvbbc\" (UID: \"8ac2850b-bf3b-4c3c-a1d1-8e59ce302246\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-gvbbc" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.599268 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8k6n\" (UniqueName: \"kubernetes.io/projected/2f8dae92-e705-48ac-ae95-fb463698c6a7-kube-api-access-t8k6n\") pod \"cert-manager-5b446d88c5-x7n5p\" (UID: \"2f8dae92-e705-48ac-ae95-fb463698c6a7\") " pod="cert-manager/cert-manager-5b446d88c5-x7n5p" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.700084 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8k6n\" (UniqueName: \"kubernetes.io/projected/2f8dae92-e705-48ac-ae95-fb463698c6a7-kube-api-access-t8k6n\") pod \"cert-manager-5b446d88c5-x7n5p\" (UID: \"2f8dae92-e705-48ac-ae95-fb463698c6a7\") " pod="cert-manager/cert-manager-5b446d88c5-x7n5p" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.700155 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pfmh\" (UniqueName: \"kubernetes.io/projected/6e2a44e6-d6b3-4b89-a243-7a9f66b36e18-kube-api-access-9pfmh\") pod \"cert-manager-cainjector-7f985d654d-pz52m\" (UID: \"6e2a44e6-d6b3-4b89-a243-7a9f66b36e18\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-pz52m" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.700184 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-685gv\" (UniqueName: \"kubernetes.io/projected/8ac2850b-bf3b-4c3c-a1d1-8e59ce302246-kube-api-access-685gv\") pod \"cert-manager-webhook-5655c58dd6-gvbbc\" (UID: \"8ac2850b-bf3b-4c3c-a1d1-8e59ce302246\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-gvbbc" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.719227 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-685gv\" (UniqueName: \"kubernetes.io/projected/8ac2850b-bf3b-4c3c-a1d1-8e59ce302246-kube-api-access-685gv\") pod \"cert-manager-webhook-5655c58dd6-gvbbc\" (UID: \"8ac2850b-bf3b-4c3c-a1d1-8e59ce302246\") " 
pod="cert-manager/cert-manager-webhook-5655c58dd6-gvbbc" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.721729 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8k6n\" (UniqueName: \"kubernetes.io/projected/2f8dae92-e705-48ac-ae95-fb463698c6a7-kube-api-access-t8k6n\") pod \"cert-manager-5b446d88c5-x7n5p\" (UID: \"2f8dae92-e705-48ac-ae95-fb463698c6a7\") " pod="cert-manager/cert-manager-5b446d88c5-x7n5p" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.722058 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pfmh\" (UniqueName: \"kubernetes.io/projected/6e2a44e6-d6b3-4b89-a243-7a9f66b36e18-kube-api-access-9pfmh\") pod \"cert-manager-cainjector-7f985d654d-pz52m\" (UID: \"6e2a44e6-d6b3-4b89-a243-7a9f66b36e18\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-pz52m" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.808338 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-pz52m" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.823586 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-x7n5p" Dec 03 19:41:42 crc kubenswrapper[4916]: I1203 19:41:42.829630 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-gvbbc" Dec 03 19:41:43 crc kubenswrapper[4916]: I1203 19:41:43.038113 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-pz52m"] Dec 03 19:41:43 crc kubenswrapper[4916]: I1203 19:41:43.051426 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 19:41:43 crc kubenswrapper[4916]: I1203 19:41:43.278558 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-pz52m" event={"ID":"6e2a44e6-d6b3-4b89-a243-7a9f66b36e18","Type":"ContainerStarted","Data":"8f19434895a47fc6069776dc64db5c224f8bec412f314c722f152fc07fbc9ce7"} Dec 03 19:41:43 crc kubenswrapper[4916]: I1203 19:41:43.290345 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-gvbbc"] Dec 03 19:41:43 crc kubenswrapper[4916]: I1203 19:41:43.302074 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-x7n5p"] Dec 03 19:41:43 crc kubenswrapper[4916]: W1203 19:41:43.309633 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2f8dae92_e705_48ac_ae95_fb463698c6a7.slice/crio-1d166be0fc0336cb1c8bc530de38b938c3ec96d9b1aa20044ad20315a0285ff7 WatchSource:0}: Error finding container 1d166be0fc0336cb1c8bc530de38b938c3ec96d9b1aa20044ad20315a0285ff7: Status 404 returned error can't find the container with id 1d166be0fc0336cb1c8bc530de38b938c3ec96d9b1aa20044ad20315a0285ff7 Dec 03 19:41:44 crc kubenswrapper[4916]: I1203 19:41:44.285433 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-gvbbc" event={"ID":"8ac2850b-bf3b-4c3c-a1d1-8e59ce302246","Type":"ContainerStarted","Data":"43e7ae3c1fb606bf73a42805be9e9420c97dc520eb8ce818c70485745b55a74a"} Dec 03 19:41:44 crc kubenswrapper[4916]: I1203 19:41:44.288031 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-x7n5p" 
event={"ID":"2f8dae92-e705-48ac-ae95-fb463698c6a7","Type":"ContainerStarted","Data":"1d166be0fc0336cb1c8bc530de38b938c3ec96d9b1aa20044ad20315a0285ff7"} Dec 03 19:41:46 crc kubenswrapper[4916]: I1203 19:41:46.158858 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:41:46 crc kubenswrapper[4916]: I1203 19:41:46.159151 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:41:47 crc kubenswrapper[4916]: I1203 19:41:47.303707 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-x7n5p" event={"ID":"2f8dae92-e705-48ac-ae95-fb463698c6a7","Type":"ContainerStarted","Data":"1d3a9987983d643434b0e78b88be4de9d40a675018e2834d7606a9c840d2c505"} Dec 03 19:41:47 crc kubenswrapper[4916]: I1203 19:41:47.305293 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-pz52m" event={"ID":"6e2a44e6-d6b3-4b89-a243-7a9f66b36e18","Type":"ContainerStarted","Data":"f4a99c0871bf78d84fac0e7dbd8ab7c4a3c47ab38ffd354113ee7ab8fcedd616"} Dec 03 19:41:47 crc kubenswrapper[4916]: I1203 19:41:47.307640 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-gvbbc" event={"ID":"8ac2850b-bf3b-4c3c-a1d1-8e59ce302246","Type":"ContainerStarted","Data":"c4b7d9dfb371a5ccaa7a8902ea322817a95dc4ec45eb83c54a90499e5531e662"} Dec 03 19:41:47 crc kubenswrapper[4916]: I1203 19:41:47.307776 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-gvbbc" Dec 03 19:41:47 crc kubenswrapper[4916]: I1203 19:41:47.320747 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-x7n5p" podStartSLOduration=2.331555045 podStartE2EDuration="5.320729853s" podCreationTimestamp="2025-12-03 19:41:42 +0000 UTC" firstStartedPulling="2025-12-03 19:41:43.311758363 +0000 UTC m=+719.274568629" lastFinishedPulling="2025-12-03 19:41:46.300933171 +0000 UTC m=+722.263743437" observedRunningTime="2025-12-03 19:41:47.317534116 +0000 UTC m=+723.280344382" watchObservedRunningTime="2025-12-03 19:41:47.320729853 +0000 UTC m=+723.283540119" Dec 03 19:41:47 crc kubenswrapper[4916]: I1203 19:41:47.355704 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-gvbbc" podStartSLOduration=2.416619293 podStartE2EDuration="5.355667061s" podCreationTimestamp="2025-12-03 19:41:42 +0000 UTC" firstStartedPulling="2025-12-03 19:41:43.29985371 +0000 UTC m=+719.262663976" lastFinishedPulling="2025-12-03 19:41:46.238901468 +0000 UTC m=+722.201711744" observedRunningTime="2025-12-03 19:41:47.340371026 +0000 UTC m=+723.303181332" watchObservedRunningTime="2025-12-03 19:41:47.355667061 +0000 UTC m=+723.318477367" Dec 03 19:41:47 crc kubenswrapper[4916]: I1203 19:41:47.370620 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-pz52m" podStartSLOduration=2.236733092 
podStartE2EDuration="5.370601406s" podCreationTimestamp="2025-12-03 19:41:42 +0000 UTC" firstStartedPulling="2025-12-03 19:41:43.049393164 +0000 UTC m=+719.012203430" lastFinishedPulling="2025-12-03 19:41:46.183261478 +0000 UTC m=+722.146071744" observedRunningTime="2025-12-03 19:41:47.363075202 +0000 UTC m=+723.325885508" watchObservedRunningTime="2025-12-03 19:41:47.370601406 +0000 UTC m=+723.333411742" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.338661 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-c9jfr"] Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.339453 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovn-controller" containerID="cri-o://67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4" gracePeriod=30 Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.339520 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="kube-rbac-proxy-node" containerID="cri-o://e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228" gracePeriod=30 Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.339587 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovn-acl-logging" containerID="cri-o://1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532" gracePeriod=30 Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.339621 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="sbdb" containerID="cri-o://d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0" gracePeriod=30 Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.339618 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53" gracePeriod=30 Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.339626 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="nbdb" containerID="cri-o://6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42" gracePeriod=30 Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.339520 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="northd" containerID="cri-o://ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa" gracePeriod=30 Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.372247 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" containerID="cri-o://bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4" gracePeriod=30 Dec 03 19:41:52 crc kubenswrapper[4916]: 
I1203 19:41:52.619795 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/3.log" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.623043 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovn-acl-logging/0.log" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.623853 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovn-controller/0.log" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.624388 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690232 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-nwhgx"] Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690445 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="kubecfg-setup" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690456 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="kubecfg-setup" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690466 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690472 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690478 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="nbdb" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690484 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="nbdb" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690494 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690499 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690506 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690512 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690519 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690525 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690531 4916 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovn-acl-logging" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690536 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovn-acl-logging" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690544 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovn-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690549 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovn-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690581 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="sbdb" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690587 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="sbdb" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690595 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="northd" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690601 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="northd" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690609 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="kube-rbac-proxy-node" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690615 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="kube-rbac-proxy-node" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690623 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690629 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690713 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690721 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovn-acl-logging" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690729 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="sbdb" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690738 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690745 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="nbdb" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690754 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690761 4916 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690767 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690773 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovn-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690781 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="kube-rbac-proxy-node" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690806 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="northd" Dec 03 19:41:52 crc kubenswrapper[4916]: E1203 19:41:52.690885 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690892 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.690980 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerName="ovnkube-controller" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.692446 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.744193 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-systemd\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.744269 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-env-overrides\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.744967 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-etc-openvswitch\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745033 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-log-socket\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745052 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745120 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-var-lib-openvswitch\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745171 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-openvswitch\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745217 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-ovn-kubernetes\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745154 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745267 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-log-socket" (OuterVolumeSpecName: "log-socket") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745233 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745266 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-script-lib\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745305 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745333 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745442 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-ovn\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745507 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-slash\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745530 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745593 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-slash" (OuterVolumeSpecName: "host-slash") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745601 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pc7lq\" (UniqueName: \"kubernetes.io/projected/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-kube-api-access-pc7lq\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745647 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-bin\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745710 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-node-log\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745752 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-systemd-units\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745788 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-netd\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745845 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovn-node-metrics-cert\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745890 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-netns\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745925 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745991 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746003 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.745940 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-var-lib-cni-networks-ovn-kubernetes\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746038 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746044 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746059 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746095 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-config\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746122 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-kubelet\") pod \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\" (UID: \"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18\") " Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746133 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-node-log" (OuterVolumeSpecName: "node-log") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746284 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746310 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f47872a1-55b4-4e4f-bf52-0e258b1292e9-env-overrides\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746331 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f47872a1-55b4-4e4f-bf52-0e258b1292e9-ovnkube-script-lib\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746366 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-log-socket\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746391 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-kubelet\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746466 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-var-lib-openvswitch\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746532 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-slash\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746682 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bnfw\" (UniqueName: \"kubernetes.io/projected/f47872a1-55b4-4e4f-bf52-0e258b1292e9-kube-api-access-4bnfw\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746704 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-config" 
(OuterVolumeSpecName: "ovnkube-config") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746798 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-run-ovn\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746879 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-etc-openvswitch\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.746941 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-run-openvswitch\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747010 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747074 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-run-netns\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747128 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-cni-bin\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747189 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-cni-netd\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747251 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f47872a1-55b4-4e4f-bf52-0e258b1292e9-ovnkube-config\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747322 4916 
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747382 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-run-ovn-kubernetes\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx"
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747433 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-node-log\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx"
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747481 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-run-systemd\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx"
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747546 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-systemd-units\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx"
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747633 4916 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-env-overrides\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747652 4916 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-etc-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747665 4916 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-log-socket\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747677 4916 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-var-lib-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747690 4916 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-openvswitch\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747700 4916 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747714 4916 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747727 4916 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747741 4916 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-slash\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747754 4916 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-bin\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747768 4916 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-node-log\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747782 4916 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-systemd-units\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747806 4916 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-cni-netd\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747820 4916 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-run-netns\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747835 4916 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747850 4916 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovnkube-config\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.747866 4916 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-host-kubelet\") on node \"crc\" DevicePath \"\""
Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.751355 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-kube-api-access-pc7lq" (OuterVolumeSpecName: "kube-api-access-pc7lq") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "kube-api-access-pc7lq". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.751749 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.762197 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" (UID: "990ba077-9bb2-4ab0-b098-c4c6fd6f4f18"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.834261 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-gvbbc" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.848754 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-cni-netd\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.848821 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f47872a1-55b4-4e4f-bf52-0e258b1292e9-ovnkube-config\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.848862 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f47872a1-55b4-4e4f-bf52-0e258b1292e9-ovn-node-metrics-cert\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.848905 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-run-ovn-kubernetes\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.848951 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-node-log\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849133 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-run-systemd\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849171 4916 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-systemd-units\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849215 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f47872a1-55b4-4e4f-bf52-0e258b1292e9-env-overrides\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849248 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f47872a1-55b4-4e4f-bf52-0e258b1292e9-ovnkube-script-lib\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849289 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-log-socket\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849325 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-kubelet\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849357 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-var-lib-openvswitch\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849393 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-slash\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849435 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bnfw\" (UniqueName: \"kubernetes.io/projected/f47872a1-55b4-4e4f-bf52-0e258b1292e9-kube-api-access-4bnfw\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849473 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-run-ovn\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849515 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-etc-openvswitch\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849549 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-run-openvswitch\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849629 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849668 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-run-netns\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849701 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-cni-bin\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849768 4916 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849795 4916 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849817 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pc7lq\" (UniqueName: \"kubernetes.io/projected/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18-kube-api-access-pc7lq\") on node \"crc\" DevicePath \"\"" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849883 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-cni-bin\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849908 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-log-socket\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849942 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-etc-openvswitch\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849945 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-run-ovn\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849998 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-run-openvswitch\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.849995 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-cni-netd\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.850042 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-kubelet\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.850092 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-node-log\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.850125 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-var-lib-openvswitch\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.850136 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-run-netns\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.850167 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-slash\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.850614 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-run-systemd\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.850656 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-systemd-units\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.850679 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-run-ovn-kubernetes\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.851140 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f47872a1-55b4-4e4f-bf52-0e258b1292e9-env-overrides\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.851238 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f47872a1-55b4-4e4f-bf52-0e258b1292e9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.851493 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f47872a1-55b4-4e4f-bf52-0e258b1292e9-ovnkube-script-lib\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.851899 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f47872a1-55b4-4e4f-bf52-0e258b1292e9-ovnkube-config\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.857924 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f47872a1-55b4-4e4f-bf52-0e258b1292e9-ovn-node-metrics-cert\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:52 crc kubenswrapper[4916]: I1203 19:41:52.886906 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bnfw\" (UniqueName: \"kubernetes.io/projected/f47872a1-55b4-4e4f-bf52-0e258b1292e9-kube-api-access-4bnfw\") pod \"ovnkube-node-nwhgx\" (UID: \"f47872a1-55b4-4e4f-bf52-0e258b1292e9\") " pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.015584 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.382221 4916 generic.go:334] "Generic (PLEG): container finished" podID="f47872a1-55b4-4e4f-bf52-0e258b1292e9" containerID="b80ce6b75a6c387d8dc3ffa1b4b81522c8ef013a07a2e53755b80707b58c1490" exitCode=0 Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.382315 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" event={"ID":"f47872a1-55b4-4e4f-bf52-0e258b1292e9","Type":"ContainerDied","Data":"b80ce6b75a6c387d8dc3ffa1b4b81522c8ef013a07a2e53755b80707b58c1490"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.382413 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" event={"ID":"f47872a1-55b4-4e4f-bf52-0e258b1292e9","Type":"ContainerStarted","Data":"f28289d618e98e023fce0bd3f18c681e50de8f0d3195e4f7d01135343b9c7e9b"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.385741 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4vkgz_d75c407a-2bbd-4cc3-bc0e-b1010aeeab57/kube-multus/2.log" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.386339 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4vkgz_d75c407a-2bbd-4cc3-bc0e-b1010aeeab57/kube-multus/1.log" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.386382 4916 generic.go:334] "Generic (PLEG): container finished" podID="d75c407a-2bbd-4cc3-bc0e-b1010aeeab57" containerID="89f2c4f684ccae25dd297700bc3c44bbf0021dc479bb07ef77e0d6ba48e131fb" exitCode=2 Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.386463 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4vkgz" event={"ID":"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57","Type":"ContainerDied","Data":"89f2c4f684ccae25dd297700bc3c44bbf0021dc479bb07ef77e0d6ba48e131fb"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.386502 4916 scope.go:117] "RemoveContainer" containerID="37078ca4b374c5f549b19fb8deff1bfc1145abaabcef86fc3c9b32172e6f10f5" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.387707 4916 scope.go:117] "RemoveContainer" containerID="89f2c4f684ccae25dd297700bc3c44bbf0021dc479bb07ef77e0d6ba48e131fb" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.392173 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovnkube-controller/3.log" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.410267 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovn-acl-logging/0.log" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.411152 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c9jfr_990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/ovn-controller/0.log" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.411861 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4" exitCode=0 Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.412050 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0" exitCode=0 Dec 03 19:41:53 crc 
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.412160 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42" exitCode=0
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.412269 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa" exitCode=0
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.412403 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53" exitCode=0
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.412509 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228" exitCode=0
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.412639 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532" exitCode=143
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.412778 4916 generic.go:334] "Generic (PLEG): container finished" podID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" containerID="67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4" exitCode=143
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.412481 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr"
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.412546 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414336 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414369 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414383 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414403 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414415 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414428 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414459 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414467 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414474 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414481 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414489 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414495 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414502 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414509 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414516 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414525 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414535 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414542 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213"}
Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414549 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0"}
"Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414556 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414615 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414623 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414633 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414640 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414648 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414655 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414665 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414679 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414687 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414694 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414701 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414708 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414714 4916 pod_container_deletor.go:114] 
"Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414721 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414727 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414734 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414741 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414750 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c9jfr" event={"ID":"990ba077-9bb2-4ab0-b098-c4c6fd6f4f18","Type":"ContainerDied","Data":"61a946cddb2bdef40b7add5dca513fde134d581e8ab48797a05a0e85fcf0b49a"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414760 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414771 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414778 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414785 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414791 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414798 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414805 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414811 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414818 4916 pod_container_deletor.go:114] 
"Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.414825 4916 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77"} Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.442054 4916 scope.go:117] "RemoveContainer" containerID="bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.508317 4916 scope.go:117] "RemoveContainer" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.539295 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-c9jfr"] Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.544005 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-c9jfr"] Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.544829 4916 scope.go:117] "RemoveContainer" containerID="d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.569655 4916 scope.go:117] "RemoveContainer" containerID="6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.591134 4916 scope.go:117] "RemoveContainer" containerID="ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.610140 4916 scope.go:117] "RemoveContainer" containerID="252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.632728 4916 scope.go:117] "RemoveContainer" containerID="e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.650244 4916 scope.go:117] "RemoveContainer" containerID="1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.670133 4916 scope.go:117] "RemoveContainer" containerID="67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.709933 4916 scope.go:117] "RemoveContainer" containerID="024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.743065 4916 scope.go:117] "RemoveContainer" containerID="bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4" Dec 03 19:41:53 crc kubenswrapper[4916]: E1203 19:41:53.743585 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4\": container with ID starting with bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4 not found: ID does not exist" containerID="bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.743638 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4"} err="failed to get container status \"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4\": rpc error: code = NotFound desc = could not 
find container \"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4\": container with ID starting with bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.743671 4916 scope.go:117] "RemoveContainer" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" Dec 03 19:41:53 crc kubenswrapper[4916]: E1203 19:41:53.744137 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\": container with ID starting with 7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213 not found: ID does not exist" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.744167 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213"} err="failed to get container status \"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\": rpc error: code = NotFound desc = could not find container \"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\": container with ID starting with 7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.744185 4916 scope.go:117] "RemoveContainer" containerID="d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0" Dec 03 19:41:53 crc kubenswrapper[4916]: E1203 19:41:53.744623 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\": container with ID starting with d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0 not found: ID does not exist" containerID="d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.744787 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0"} err="failed to get container status \"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\": rpc error: code = NotFound desc = could not find container \"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\": container with ID starting with d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.744892 4916 scope.go:117] "RemoveContainer" containerID="6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42" Dec 03 19:41:53 crc kubenswrapper[4916]: E1203 19:41:53.745384 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\": container with ID starting with 6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42 not found: ID does not exist" containerID="6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.745416 4916 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42"} err="failed to get container status \"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\": rpc error: code = NotFound desc = could not find container \"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\": container with ID starting with 6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.745437 4916 scope.go:117] "RemoveContainer" containerID="ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa" Dec 03 19:41:53 crc kubenswrapper[4916]: E1203 19:41:53.745726 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\": container with ID starting with ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa not found: ID does not exist" containerID="ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.745779 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa"} err="failed to get container status \"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\": rpc error: code = NotFound desc = could not find container \"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\": container with ID starting with ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.745815 4916 scope.go:117] "RemoveContainer" containerID="252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53" Dec 03 19:41:53 crc kubenswrapper[4916]: E1203 19:41:53.746176 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\": container with ID starting with 252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53 not found: ID does not exist" containerID="252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.746208 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53"} err="failed to get container status \"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\": rpc error: code = NotFound desc = could not find container \"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\": container with ID starting with 252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.746234 4916 scope.go:117] "RemoveContainer" containerID="e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228" Dec 03 19:41:53 crc kubenswrapper[4916]: E1203 19:41:53.746647 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\": container with ID starting with e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228 not found: ID does not exist" 
containerID="e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.746712 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228"} err="failed to get container status \"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\": rpc error: code = NotFound desc = could not find container \"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\": container with ID starting with e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.746749 4916 scope.go:117] "RemoveContainer" containerID="1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532" Dec 03 19:41:53 crc kubenswrapper[4916]: E1203 19:41:53.747118 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\": container with ID starting with 1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532 not found: ID does not exist" containerID="1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.747151 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532"} err="failed to get container status \"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\": rpc error: code = NotFound desc = could not find container \"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\": container with ID starting with 1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.747170 4916 scope.go:117] "RemoveContainer" containerID="67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4" Dec 03 19:41:53 crc kubenswrapper[4916]: E1203 19:41:53.747615 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\": container with ID starting with 67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4 not found: ID does not exist" containerID="67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.747722 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4"} err="failed to get container status \"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\": rpc error: code = NotFound desc = could not find container \"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\": container with ID starting with 67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.747814 4916 scope.go:117] "RemoveContainer" containerID="024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77" Dec 03 19:41:53 crc kubenswrapper[4916]: E1203 19:41:53.748244 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\": container with ID starting with 024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77 not found: ID does not exist" containerID="024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.748294 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77"} err="failed to get container status \"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\": rpc error: code = NotFound desc = could not find container \"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\": container with ID starting with 024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.748328 4916 scope.go:117] "RemoveContainer" containerID="bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.748787 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4"} err="failed to get container status \"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4\": rpc error: code = NotFound desc = could not find container \"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4\": container with ID starting with bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.748822 4916 scope.go:117] "RemoveContainer" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.749251 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213"} err="failed to get container status \"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\": rpc error: code = NotFound desc = could not find container \"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\": container with ID starting with 7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.749346 4916 scope.go:117] "RemoveContainer" containerID="d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.749804 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0"} err="failed to get container status \"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\": rpc error: code = NotFound desc = could not find container \"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\": container with ID starting with d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.749837 4916 scope.go:117] "RemoveContainer" containerID="6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.750215 4916 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42"} err="failed to get container status \"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\": rpc error: code = NotFound desc = could not find container \"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\": container with ID starting with 6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.750245 4916 scope.go:117] "RemoveContainer" containerID="ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.750757 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa"} err="failed to get container status \"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\": rpc error: code = NotFound desc = could not find container \"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\": container with ID starting with ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.750785 4916 scope.go:117] "RemoveContainer" containerID="252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.751152 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53"} err="failed to get container status \"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\": rpc error: code = NotFound desc = could not find container \"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\": container with ID starting with 252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.751214 4916 scope.go:117] "RemoveContainer" containerID="e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.751654 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228"} err="failed to get container status \"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\": rpc error: code = NotFound desc = could not find container \"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\": container with ID starting with e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.751745 4916 scope.go:117] "RemoveContainer" containerID="1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.752359 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532"} err="failed to get container status \"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\": rpc error: code = NotFound desc = could not find container \"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\": container with ID starting with 1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532 not found: ID does not exist" Dec 
03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.752390 4916 scope.go:117] "RemoveContainer" containerID="67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.752865 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4"} err="failed to get container status \"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\": rpc error: code = NotFound desc = could not find container \"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\": container with ID starting with 67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.752912 4916 scope.go:117] "RemoveContainer" containerID="024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.753181 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77"} err="failed to get container status \"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\": rpc error: code = NotFound desc = could not find container \"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\": container with ID starting with 024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.753216 4916 scope.go:117] "RemoveContainer" containerID="bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.753722 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4"} err="failed to get container status \"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4\": rpc error: code = NotFound desc = could not find container \"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4\": container with ID starting with bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.753766 4916 scope.go:117] "RemoveContainer" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.754207 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213"} err="failed to get container status \"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\": rpc error: code = NotFound desc = could not find container \"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\": container with ID starting with 7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.754248 4916 scope.go:117] "RemoveContainer" containerID="d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.754746 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0"} err="failed to get container status 
\"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\": rpc error: code = NotFound desc = could not find container \"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\": container with ID starting with d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.754779 4916 scope.go:117] "RemoveContainer" containerID="6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.755064 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42"} err="failed to get container status \"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\": rpc error: code = NotFound desc = could not find container \"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\": container with ID starting with 6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.755106 4916 scope.go:117] "RemoveContainer" containerID="ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.755528 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa"} err="failed to get container status \"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\": rpc error: code = NotFound desc = could not find container \"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\": container with ID starting with ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.755558 4916 scope.go:117] "RemoveContainer" containerID="252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.755852 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53"} err="failed to get container status \"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\": rpc error: code = NotFound desc = could not find container \"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\": container with ID starting with 252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.755892 4916 scope.go:117] "RemoveContainer" containerID="e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.756454 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228"} err="failed to get container status \"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\": rpc error: code = NotFound desc = could not find container \"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\": container with ID starting with e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.756680 4916 scope.go:117] "RemoveContainer" 
containerID="1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.757167 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532"} err="failed to get container status \"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\": rpc error: code = NotFound desc = could not find container \"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\": container with ID starting with 1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.757203 4916 scope.go:117] "RemoveContainer" containerID="67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.757466 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4"} err="failed to get container status \"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\": rpc error: code = NotFound desc = could not find container \"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\": container with ID starting with 67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.757496 4916 scope.go:117] "RemoveContainer" containerID="024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.758328 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77"} err="failed to get container status \"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\": rpc error: code = NotFound desc = could not find container \"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\": container with ID starting with 024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.758368 4916 scope.go:117] "RemoveContainer" containerID="bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.758925 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4"} err="failed to get container status \"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4\": rpc error: code = NotFound desc = could not find container \"bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4\": container with ID starting with bd02298746191bc98460fd5b7560e006e7d203b0344384b63ffcbd55c0dba9d4 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.758953 4916 scope.go:117] "RemoveContainer" containerID="7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.759513 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213"} err="failed to get container status \"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\": rpc error: code = NotFound desc = could not find 
container \"7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213\": container with ID starting with 7b57cc11ae7d5f24b3fbaeca387f30c723679bab2926d16f9f9a9cc855255213 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.759603 4916 scope.go:117] "RemoveContainer" containerID="d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.759897 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0"} err="failed to get container status \"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\": rpc error: code = NotFound desc = could not find container \"d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0\": container with ID starting with d27e0348fdcd81c2041ef8d32402a404b28719a8706727ed2585cfd922c32bd0 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.759928 4916 scope.go:117] "RemoveContainer" containerID="6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.760234 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42"} err="failed to get container status \"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\": rpc error: code = NotFound desc = could not find container \"6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42\": container with ID starting with 6ae55acc6f358f0011c6133601268638b255c9d1df1f6f4e9f146e4ae20b8f42 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.760307 4916 scope.go:117] "RemoveContainer" containerID="ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.760588 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa"} err="failed to get container status \"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\": rpc error: code = NotFound desc = could not find container \"ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa\": container with ID starting with ee08aab1b8ec34ae27fa17df3526f7028c540ae16fdfe0d2dad5b3114bb891aa not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.760622 4916 scope.go:117] "RemoveContainer" containerID="252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.761056 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53"} err="failed to get container status \"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\": rpc error: code = NotFound desc = could not find container \"252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53\": container with ID starting with 252a1d262a2eb2e976bb1e2ae46260c3097451d06ec0bd76738ed59c5a8d8e53 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.761086 4916 scope.go:117] "RemoveContainer" containerID="e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.761454 4916 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228"} err="failed to get container status \"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\": rpc error: code = NotFound desc = could not find container \"e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228\": container with ID starting with e320b5210910a903edfdeeda6a6e363bccbe4c209d928beeab59cf2ad6df1228 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.761522 4916 scope.go:117] "RemoveContainer" containerID="1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.761929 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532"} err="failed to get container status \"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\": rpc error: code = NotFound desc = could not find container \"1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532\": container with ID starting with 1f4ef8db1158b206c4e1986647bb8c14cf6bf76950f749132248dd4c92643532 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.761978 4916 scope.go:117] "RemoveContainer" containerID="67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.762512 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4"} err="failed to get container status \"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\": rpc error: code = NotFound desc = could not find container \"67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4\": container with ID starting with 67a57917039d5e04077c49f4b247d198188533210c7f974d4b18dc0dea5e10b4 not found: ID does not exist" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.762648 4916 scope.go:117] "RemoveContainer" containerID="024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77" Dec 03 19:41:53 crc kubenswrapper[4916]: I1203 19:41:53.762979 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77"} err="failed to get container status \"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\": rpc error: code = NotFound desc = could not find container \"024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77\": container with ID starting with 024e54e1f04a99bad47ddd6179b15e5cff7f778a883d0ce62485b10abdbbfa77 not found: ID does not exist" Dec 03 19:41:54 crc kubenswrapper[4916]: I1203 19:41:54.429823 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4vkgz_d75c407a-2bbd-4cc3-bc0e-b1010aeeab57/kube-multus/2.log" Dec 03 19:41:54 crc kubenswrapper[4916]: I1203 19:41:54.430425 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4vkgz" event={"ID":"d75c407a-2bbd-4cc3-bc0e-b1010aeeab57","Type":"ContainerStarted","Data":"7da4ede20b5e9b7e871ca1b4b61173843739b193162fbb63ef75996675500d2f"} Dec 03 19:41:54 crc kubenswrapper[4916]: I1203 19:41:54.442545 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" 
event={"ID":"f47872a1-55b4-4e4f-bf52-0e258b1292e9","Type":"ContainerStarted","Data":"0aa10b9c5b9c95727ac03414fa52eea69a54da936e51f25038169a999cdcfb5c"} Dec 03 19:41:54 crc kubenswrapper[4916]: I1203 19:41:54.442622 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" event={"ID":"f47872a1-55b4-4e4f-bf52-0e258b1292e9","Type":"ContainerStarted","Data":"6c16247d771be54270721e8de7c737f537a15b57163def005830f0bbdc868ce3"} Dec 03 19:41:54 crc kubenswrapper[4916]: I1203 19:41:54.442643 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" event={"ID":"f47872a1-55b4-4e4f-bf52-0e258b1292e9","Type":"ContainerStarted","Data":"7da2f62307c2b9f41f8cf060244166dd6f98b14ae3a1f25beadefd363f4439fe"} Dec 03 19:41:54 crc kubenswrapper[4916]: I1203 19:41:54.442661 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" event={"ID":"f47872a1-55b4-4e4f-bf52-0e258b1292e9","Type":"ContainerStarted","Data":"f8f14530f4ee9654c005223f2105ffecb0a8bff8d19981db6c63eff5e7843194"} Dec 03 19:41:54 crc kubenswrapper[4916]: I1203 19:41:54.442679 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" event={"ID":"f47872a1-55b4-4e4f-bf52-0e258b1292e9","Type":"ContainerStarted","Data":"2e1cbba1c20adf2a67deea0d8b770abd853470f99f92e45ad3d8ee1214050000"} Dec 03 19:41:54 crc kubenswrapper[4916]: I1203 19:41:54.442698 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" event={"ID":"f47872a1-55b4-4e4f-bf52-0e258b1292e9","Type":"ContainerStarted","Data":"bf27af3e5b92c7dc83c3489ef8d9adf16907346851a6eb3975418ded3b3b8671"} Dec 03 19:41:54 crc kubenswrapper[4916]: I1203 19:41:54.490287 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="990ba077-9bb2-4ab0-b098-c4c6fd6f4f18" path="/var/lib/kubelet/pods/990ba077-9bb2-4ab0-b098-c4c6fd6f4f18/volumes" Dec 03 19:41:57 crc kubenswrapper[4916]: I1203 19:41:57.471806 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" event={"ID":"f47872a1-55b4-4e4f-bf52-0e258b1292e9","Type":"ContainerStarted","Data":"07b948534a8b2f25c61efae09d8198b36968b7d75471742674cae81e201b4e0e"} Dec 03 19:41:59 crc kubenswrapper[4916]: I1203 19:41:59.486878 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" event={"ID":"f47872a1-55b4-4e4f-bf52-0e258b1292e9","Type":"ContainerStarted","Data":"1f934f3d83a0a86ee978fa297b5ea453fc5844ae388824b29e7994ded84f8733"} Dec 03 19:41:59 crc kubenswrapper[4916]: I1203 19:41:59.487524 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:59 crc kubenswrapper[4916]: I1203 19:41:59.487551 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:59 crc kubenswrapper[4916]: I1203 19:41:59.487589 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:59 crc kubenswrapper[4916]: I1203 19:41:59.523279 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:41:59 crc kubenswrapper[4916]: I1203 19:41:59.524644 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" podStartSLOduration=7.524623824 podStartE2EDuration="7.524623824s" podCreationTimestamp="2025-12-03 19:41:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:41:59.523884274 +0000 UTC m=+735.486694600" watchObservedRunningTime="2025-12-03 19:41:59.524623824 +0000 UTC m=+735.487434120" Dec 03 19:41:59 crc kubenswrapper[4916]: I1203 19:41:59.532460 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:42:16 crc kubenswrapper[4916]: I1203 19:42:16.159242 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:42:16 crc kubenswrapper[4916]: I1203 19:42:16.159938 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:42:16 crc kubenswrapper[4916]: I1203 19:42:16.160000 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:42:16 crc kubenswrapper[4916]: I1203 19:42:16.160782 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"02ae566c7ff459b62724fc48986cab4ba376415af729ca4442e9a81a3e43827b"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 19:42:16 crc kubenswrapper[4916]: I1203 19:42:16.160857 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://02ae566c7ff459b62724fc48986cab4ba376415af729ca4442e9a81a3e43827b" gracePeriod=600 Dec 03 19:42:16 crc kubenswrapper[4916]: I1203 19:42:16.601277 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="02ae566c7ff459b62724fc48986cab4ba376415af729ca4442e9a81a3e43827b" exitCode=0 Dec 03 19:42:16 crc kubenswrapper[4916]: I1203 19:42:16.601371 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"02ae566c7ff459b62724fc48986cab4ba376415af729ca4442e9a81a3e43827b"} Dec 03 19:42:16 crc kubenswrapper[4916]: I1203 19:42:16.601876 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"dbc6d2dff458c9d2c91a2f82a009f88b78c61b85becef77733114e92974e9b6f"} Dec 03 19:42:16 crc kubenswrapper[4916]: I1203 19:42:16.601912 4916 scope.go:117] "RemoveContainer" containerID="659dd16e0d19a34a54acf070e9f83f7ea8206f5ca21d579f1ce0b13af7969996" Dec 03 19:42:21 crc kubenswrapper[4916]: 
I1203 19:42:21.794371 4916 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 19:42:23 crc kubenswrapper[4916]: I1203 19:42:23.061092 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nwhgx" Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.577520 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7"] Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.579861 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.582960 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.598852 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7"] Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.645790 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9gkb\" (UniqueName: \"kubernetes.io/projected/dfdad637-ca55-49e9-8065-75c8d2871739-kube-api-access-w9gkb\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.645909 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.645962 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.747450 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.747541 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:34 crc 
kubenswrapper[4916]: I1203 19:42:34.747741 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9gkb\" (UniqueName: \"kubernetes.io/projected/dfdad637-ca55-49e9-8065-75c8d2871739-kube-api-access-w9gkb\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.747998 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.748317 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.781431 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9gkb\" (UniqueName: \"kubernetes.io/projected/dfdad637-ca55-49e9-8065-75c8d2871739-kube-api-access-w9gkb\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:34 crc kubenswrapper[4916]: I1203 19:42:34.948491 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:35 crc kubenswrapper[4916]: I1203 19:42:35.216404 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7"] Dec 03 19:42:35 crc kubenswrapper[4916]: I1203 19:42:35.737194 4916 generic.go:334] "Generic (PLEG): container finished" podID="dfdad637-ca55-49e9-8065-75c8d2871739" containerID="dc86b4d953c6d9a3bb1c200f3517d1c349d01c9ea5ebe87336550fc839421e08" exitCode=0 Dec 03 19:42:35 crc kubenswrapper[4916]: I1203 19:42:35.737292 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" event={"ID":"dfdad637-ca55-49e9-8065-75c8d2871739","Type":"ContainerDied","Data":"dc86b4d953c6d9a3bb1c200f3517d1c349d01c9ea5ebe87336550fc839421e08"} Dec 03 19:42:35 crc kubenswrapper[4916]: I1203 19:42:35.737553 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" event={"ID":"dfdad637-ca55-49e9-8065-75c8d2871739","Type":"ContainerStarted","Data":"30d150ab73c9c66037b0ab819b8ae191e5e3d9b29c310d2ac1596f38b7bbd402"} Dec 03 19:42:36 crc kubenswrapper[4916]: I1203 19:42:36.885750 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lzfm5"] Dec 03 19:42:36 crc kubenswrapper[4916]: I1203 19:42:36.888143 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:36 crc kubenswrapper[4916]: I1203 19:42:36.896724 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lzfm5"] Dec 03 19:42:36 crc kubenswrapper[4916]: I1203 19:42:36.984630 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-utilities\") pod \"redhat-operators-lzfm5\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:36 crc kubenswrapper[4916]: I1203 19:42:36.984697 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fzd8\" (UniqueName: \"kubernetes.io/projected/6e0c7cb4-c356-4108-9600-fdc55276def9-kube-api-access-5fzd8\") pod \"redhat-operators-lzfm5\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:36 crc kubenswrapper[4916]: I1203 19:42:36.984903 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-catalog-content\") pod \"redhat-operators-lzfm5\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.086642 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-utilities\") pod \"redhat-operators-lzfm5\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.086699 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fzd8\" (UniqueName: \"kubernetes.io/projected/6e0c7cb4-c356-4108-9600-fdc55276def9-kube-api-access-5fzd8\") pod \"redhat-operators-lzfm5\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.086752 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-catalog-content\") pod \"redhat-operators-lzfm5\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.087247 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-utilities\") pod \"redhat-operators-lzfm5\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.087379 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-catalog-content\") pod \"redhat-operators-lzfm5\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.108130 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5fzd8\" (UniqueName: \"kubernetes.io/projected/6e0c7cb4-c356-4108-9600-fdc55276def9-kube-api-access-5fzd8\") pod \"redhat-operators-lzfm5\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.216794 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.434019 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lzfm5"] Dec 03 19:42:37 crc kubenswrapper[4916]: W1203 19:42:37.435027 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e0c7cb4_c356_4108_9600_fdc55276def9.slice/crio-797ece7e3561d7b38ab1fb54fc64724eb202a84df684253ed6836b9ed00f88a5 WatchSource:0}: Error finding container 797ece7e3561d7b38ab1fb54fc64724eb202a84df684253ed6836b9ed00f88a5: Status 404 returned error can't find the container with id 797ece7e3561d7b38ab1fb54fc64724eb202a84df684253ed6836b9ed00f88a5 Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.754452 4916 generic.go:334] "Generic (PLEG): container finished" podID="dfdad637-ca55-49e9-8065-75c8d2871739" containerID="5dab9515ba7d23cd491fcf046fe03080c61afab7f1cf266a974075b8e5e0126d" exitCode=0 Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.754528 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" event={"ID":"dfdad637-ca55-49e9-8065-75c8d2871739","Type":"ContainerDied","Data":"5dab9515ba7d23cd491fcf046fe03080c61afab7f1cf266a974075b8e5e0126d"} Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.756292 4916 generic.go:334] "Generic (PLEG): container finished" podID="6e0c7cb4-c356-4108-9600-fdc55276def9" containerID="332df19fd6bbda0e748eb8f0b23761df894961c31b0ba4eaabfd232d85af6557" exitCode=0 Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.756339 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzfm5" event={"ID":"6e0c7cb4-c356-4108-9600-fdc55276def9","Type":"ContainerDied","Data":"332df19fd6bbda0e748eb8f0b23761df894961c31b0ba4eaabfd232d85af6557"} Dec 03 19:42:37 crc kubenswrapper[4916]: I1203 19:42:37.756373 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzfm5" event={"ID":"6e0c7cb4-c356-4108-9600-fdc55276def9","Type":"ContainerStarted","Data":"797ece7e3561d7b38ab1fb54fc64724eb202a84df684253ed6836b9ed00f88a5"} Dec 03 19:42:38 crc kubenswrapper[4916]: I1203 19:42:38.767241 4916 generic.go:334] "Generic (PLEG): container finished" podID="dfdad637-ca55-49e9-8065-75c8d2871739" containerID="2d98fef9d00ea2419f3894a5bba217fcf6f381e75cd54f29c97e352fe12e91d4" exitCode=0 Dec 03 19:42:38 crc kubenswrapper[4916]: I1203 19:42:38.767704 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" event={"ID":"dfdad637-ca55-49e9-8065-75c8d2871739","Type":"ContainerDied","Data":"2d98fef9d00ea2419f3894a5bba217fcf6f381e75cd54f29c97e352fe12e91d4"} Dec 03 19:42:38 crc kubenswrapper[4916]: I1203 19:42:38.772268 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzfm5" 
event={"ID":"6e0c7cb4-c356-4108-9600-fdc55276def9","Type":"ContainerStarted","Data":"4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996"} Dec 03 19:42:39 crc kubenswrapper[4916]: I1203 19:42:39.780427 4916 generic.go:334] "Generic (PLEG): container finished" podID="6e0c7cb4-c356-4108-9600-fdc55276def9" containerID="4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996" exitCode=0 Dec 03 19:42:39 crc kubenswrapper[4916]: I1203 19:42:39.780881 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzfm5" event={"ID":"6e0c7cb4-c356-4108-9600-fdc55276def9","Type":"ContainerDied","Data":"4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996"} Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.032103 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.130307 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-bundle\") pod \"dfdad637-ca55-49e9-8065-75c8d2871739\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.130437 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9gkb\" (UniqueName: \"kubernetes.io/projected/dfdad637-ca55-49e9-8065-75c8d2871739-kube-api-access-w9gkb\") pod \"dfdad637-ca55-49e9-8065-75c8d2871739\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.130486 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-util\") pod \"dfdad637-ca55-49e9-8065-75c8d2871739\" (UID: \"dfdad637-ca55-49e9-8065-75c8d2871739\") " Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.131462 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-bundle" (OuterVolumeSpecName: "bundle") pod "dfdad637-ca55-49e9-8065-75c8d2871739" (UID: "dfdad637-ca55-49e9-8065-75c8d2871739"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.136406 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfdad637-ca55-49e9-8065-75c8d2871739-kube-api-access-w9gkb" (OuterVolumeSpecName: "kube-api-access-w9gkb") pod "dfdad637-ca55-49e9-8065-75c8d2871739" (UID: "dfdad637-ca55-49e9-8065-75c8d2871739"). InnerVolumeSpecName "kube-api-access-w9gkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.164935 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-util" (OuterVolumeSpecName: "util") pod "dfdad637-ca55-49e9-8065-75c8d2871739" (UID: "dfdad637-ca55-49e9-8065-75c8d2871739"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.232323 4916 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.232365 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9gkb\" (UniqueName: \"kubernetes.io/projected/dfdad637-ca55-49e9-8065-75c8d2871739-kube-api-access-w9gkb\") on node \"crc\" DevicePath \"\"" Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.232374 4916 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfdad637-ca55-49e9-8065-75c8d2871739-util\") on node \"crc\" DevicePath \"\"" Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.794038 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.794011 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7" event={"ID":"dfdad637-ca55-49e9-8065-75c8d2871739","Type":"ContainerDied","Data":"30d150ab73c9c66037b0ab819b8ae191e5e3d9b29c310d2ac1596f38b7bbd402"} Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.794205 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30d150ab73c9c66037b0ab819b8ae191e5e3d9b29c310d2ac1596f38b7bbd402" Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.799191 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzfm5" event={"ID":"6e0c7cb4-c356-4108-9600-fdc55276def9","Type":"ContainerStarted","Data":"825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3"} Dec 03 19:42:40 crc kubenswrapper[4916]: I1203 19:42:40.842004 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lzfm5" podStartSLOduration=2.419046166 podStartE2EDuration="4.841976639s" podCreationTimestamp="2025-12-03 19:42:36 +0000 UTC" firstStartedPulling="2025-12-03 19:42:37.757911856 +0000 UTC m=+773.720722132" lastFinishedPulling="2025-12-03 19:42:40.180842349 +0000 UTC m=+776.143652605" observedRunningTime="2025-12-03 19:42:40.838272918 +0000 UTC m=+776.801083224" watchObservedRunningTime="2025-12-03 19:42:40.841976639 +0000 UTC m=+776.804786935" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.401415 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-6jmkr"] Dec 03 19:42:44 crc kubenswrapper[4916]: E1203 19:42:44.401666 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfdad637-ca55-49e9-8065-75c8d2871739" containerName="util" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.401679 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfdad637-ca55-49e9-8065-75c8d2871739" containerName="util" Dec 03 19:42:44 crc kubenswrapper[4916]: E1203 19:42:44.401701 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfdad637-ca55-49e9-8065-75c8d2871739" containerName="extract" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.401707 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfdad637-ca55-49e9-8065-75c8d2871739" containerName="extract" Dec 03 19:42:44 crc 
kubenswrapper[4916]: E1203 19:42:44.401715 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfdad637-ca55-49e9-8065-75c8d2871739" containerName="pull" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.401721 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfdad637-ca55-49e9-8065-75c8d2871739" containerName="pull" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.401831 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfdad637-ca55-49e9-8065-75c8d2871739" containerName="extract" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.402195 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6jmkr" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.404652 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.405549 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-649pb\" (UniqueName: \"kubernetes.io/projected/697f3c53-6482-4054-8f02-fe024ba5f514-kube-api-access-649pb\") pod \"nmstate-operator-5b5b58f5c8-6jmkr\" (UID: \"697f3c53-6482-4054-8f02-fe024ba5f514\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6jmkr" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.405636 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.410297 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-5m5pd" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.458322 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-6jmkr"] Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.506355 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-649pb\" (UniqueName: \"kubernetes.io/projected/697f3c53-6482-4054-8f02-fe024ba5f514-kube-api-access-649pb\") pod \"nmstate-operator-5b5b58f5c8-6jmkr\" (UID: \"697f3c53-6482-4054-8f02-fe024ba5f514\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6jmkr" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.519825 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.530511 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.546829 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-649pb\" (UniqueName: \"kubernetes.io/projected/697f3c53-6482-4054-8f02-fe024ba5f514-kube-api-access-649pb\") pod \"nmstate-operator-5b5b58f5c8-6jmkr\" (UID: \"697f3c53-6482-4054-8f02-fe024ba5f514\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6jmkr" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.723761 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-5m5pd" Dec 03 19:42:44 crc kubenswrapper[4916]: I1203 19:42:44.732057 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6jmkr" Dec 03 19:42:45 crc kubenswrapper[4916]: I1203 19:42:45.166493 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-6jmkr"] Dec 03 19:42:45 crc kubenswrapper[4916]: W1203 19:42:45.181686 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod697f3c53_6482_4054_8f02_fe024ba5f514.slice/crio-528d9597be121a043a0f63b74329b81845386a99d9b4c30f77b530d0600c8bb9 WatchSource:0}: Error finding container 528d9597be121a043a0f63b74329b81845386a99d9b4c30f77b530d0600c8bb9: Status 404 returned error can't find the container with id 528d9597be121a043a0f63b74329b81845386a99d9b4c30f77b530d0600c8bb9 Dec 03 19:42:45 crc kubenswrapper[4916]: I1203 19:42:45.832732 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6jmkr" event={"ID":"697f3c53-6482-4054-8f02-fe024ba5f514","Type":"ContainerStarted","Data":"528d9597be121a043a0f63b74329b81845386a99d9b4c30f77b530d0600c8bb9"} Dec 03 19:42:47 crc kubenswrapper[4916]: I1203 19:42:47.217081 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:47 crc kubenswrapper[4916]: I1203 19:42:47.217351 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:47 crc kubenswrapper[4916]: I1203 19:42:47.277314 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:47 crc kubenswrapper[4916]: I1203 19:42:47.904263 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:48 crc kubenswrapper[4916]: I1203 19:42:48.855386 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6jmkr" event={"ID":"697f3c53-6482-4054-8f02-fe024ba5f514","Type":"ContainerStarted","Data":"4d8359aa16f8314e67eb32e9ae098e4c0981d156efc6566bae43350150d6f0aa"} Dec 03 19:42:48 crc kubenswrapper[4916]: I1203 19:42:48.875602 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-6jmkr" podStartSLOduration=2.276741737 podStartE2EDuration="4.875586044s" podCreationTimestamp="2025-12-03 19:42:44 +0000 UTC" firstStartedPulling="2025-12-03 19:42:45.183587855 +0000 UTC m=+781.146398121" lastFinishedPulling="2025-12-03 19:42:47.782432142 +0000 UTC m=+783.745242428" observedRunningTime="2025-12-03 19:42:48.873826106 +0000 UTC m=+784.836636392" watchObservedRunningTime="2025-12-03 19:42:48.875586044 +0000 UTC m=+784.838396310" Dec 03 19:42:49 crc kubenswrapper[4916]: I1203 19:42:49.878000 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lzfm5"] Dec 03 19:42:49 crc kubenswrapper[4916]: I1203 19:42:49.878323 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lzfm5" podUID="6e0c7cb4-c356-4108-9600-fdc55276def9" containerName="registry-server" containerID="cri-o://825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3" gracePeriod=2 Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.700264 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.704706 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fzd8\" (UniqueName: \"kubernetes.io/projected/6e0c7cb4-c356-4108-9600-fdc55276def9-kube-api-access-5fzd8\") pod \"6e0c7cb4-c356-4108-9600-fdc55276def9\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.704742 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-utilities\") pod \"6e0c7cb4-c356-4108-9600-fdc55276def9\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.704785 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-catalog-content\") pod \"6e0c7cb4-c356-4108-9600-fdc55276def9\" (UID: \"6e0c7cb4-c356-4108-9600-fdc55276def9\") " Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.710727 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e0c7cb4-c356-4108-9600-fdc55276def9-kube-api-access-5fzd8" (OuterVolumeSpecName: "kube-api-access-5fzd8") pod "6e0c7cb4-c356-4108-9600-fdc55276def9" (UID: "6e0c7cb4-c356-4108-9600-fdc55276def9"). InnerVolumeSpecName "kube-api-access-5fzd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.719154 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-utilities" (OuterVolumeSpecName: "utilities") pod "6e0c7cb4-c356-4108-9600-fdc55276def9" (UID: "6e0c7cb4-c356-4108-9600-fdc55276def9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.806643 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fzd8\" (UniqueName: \"kubernetes.io/projected/6e0c7cb4-c356-4108-9600-fdc55276def9-kube-api-access-5fzd8\") on node \"crc\" DevicePath \"\"" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.806729 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.812936 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6e0c7cb4-c356-4108-9600-fdc55276def9" (UID: "6e0c7cb4-c356-4108-9600-fdc55276def9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.881533 4916 generic.go:334] "Generic (PLEG): container finished" podID="6e0c7cb4-c356-4108-9600-fdc55276def9" containerID="825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3" exitCode=0 Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.881637 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lzfm5" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.881631 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzfm5" event={"ID":"6e0c7cb4-c356-4108-9600-fdc55276def9","Type":"ContainerDied","Data":"825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3"} Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.881707 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lzfm5" event={"ID":"6e0c7cb4-c356-4108-9600-fdc55276def9","Type":"ContainerDied","Data":"797ece7e3561d7b38ab1fb54fc64724eb202a84df684253ed6836b9ed00f88a5"} Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.881742 4916 scope.go:117] "RemoveContainer" containerID="825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.910915 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e0c7cb4-c356-4108-9600-fdc55276def9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.912429 4916 scope.go:117] "RemoveContainer" containerID="4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.913038 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lzfm5"] Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.916819 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lzfm5"] Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.960823 4916 scope.go:117] "RemoveContainer" containerID="332df19fd6bbda0e748eb8f0b23761df894961c31b0ba4eaabfd232d85af6557" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.980623 4916 scope.go:117] "RemoveContainer" containerID="825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3" Dec 03 19:42:51 crc kubenswrapper[4916]: E1203 19:42:51.981101 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3\": container with ID starting with 825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3 not found: ID does not exist" containerID="825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.981132 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3"} err="failed to get container status \"825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3\": rpc error: code = NotFound desc = could not find container \"825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3\": container with ID starting with 825fbf9202795df9fcb0634c6c47b8f6816241636742e7926a9a497ca1be95e3 not found: ID does not exist" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.981152 4916 scope.go:117] "RemoveContainer" containerID="4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996" Dec 03 19:42:51 crc kubenswrapper[4916]: E1203 19:42:51.981540 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996\": container with ID 
starting with 4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996 not found: ID does not exist" containerID="4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.981614 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996"} err="failed to get container status \"4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996\": rpc error: code = NotFound desc = could not find container \"4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996\": container with ID starting with 4783dca261ac8dbd3cf78af3bea4978919a759e268cfe68f9224b624548b6996 not found: ID does not exist" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.981645 4916 scope.go:117] "RemoveContainer" containerID="332df19fd6bbda0e748eb8f0b23761df894961c31b0ba4eaabfd232d85af6557" Dec 03 19:42:51 crc kubenswrapper[4916]: E1203 19:42:51.981964 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"332df19fd6bbda0e748eb8f0b23761df894961c31b0ba4eaabfd232d85af6557\": container with ID starting with 332df19fd6bbda0e748eb8f0b23761df894961c31b0ba4eaabfd232d85af6557 not found: ID does not exist" containerID="332df19fd6bbda0e748eb8f0b23761df894961c31b0ba4eaabfd232d85af6557" Dec 03 19:42:51 crc kubenswrapper[4916]: I1203 19:42:51.981985 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"332df19fd6bbda0e748eb8f0b23761df894961c31b0ba4eaabfd232d85af6557"} err="failed to get container status \"332df19fd6bbda0e748eb8f0b23761df894961c31b0ba4eaabfd232d85af6557\": rpc error: code = NotFound desc = could not find container \"332df19fd6bbda0e748eb8f0b23761df894961c31b0ba4eaabfd232d85af6557\": container with ID starting with 332df19fd6bbda0e748eb8f0b23761df894961c31b0ba4eaabfd232d85af6557 not found: ID does not exist" Dec 03 19:42:52 crc kubenswrapper[4916]: I1203 19:42:52.491699 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e0c7cb4-c356-4108-9600-fdc55276def9" path="/var/lib/kubelet/pods/6e0c7cb4-c356-4108-9600-fdc55276def9/volumes" Dec 03 19:42:54 crc kubenswrapper[4916]: I1203 19:42:54.982082 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn"] Dec 03 19:42:54 crc kubenswrapper[4916]: E1203 19:42:54.982723 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e0c7cb4-c356-4108-9600-fdc55276def9" containerName="extract-content" Dec 03 19:42:54 crc kubenswrapper[4916]: I1203 19:42:54.982747 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e0c7cb4-c356-4108-9600-fdc55276def9" containerName="extract-content" Dec 03 19:42:54 crc kubenswrapper[4916]: E1203 19:42:54.982765 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e0c7cb4-c356-4108-9600-fdc55276def9" containerName="registry-server" Dec 03 19:42:54 crc kubenswrapper[4916]: I1203 19:42:54.982779 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e0c7cb4-c356-4108-9600-fdc55276def9" containerName="registry-server" Dec 03 19:42:54 crc kubenswrapper[4916]: E1203 19:42:54.982811 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e0c7cb4-c356-4108-9600-fdc55276def9" containerName="extract-utilities" Dec 03 19:42:54 crc kubenswrapper[4916]: I1203 19:42:54.982825 4916 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="6e0c7cb4-c356-4108-9600-fdc55276def9" containerName="extract-utilities" Dec 03 19:42:54 crc kubenswrapper[4916]: I1203 19:42:54.982996 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e0c7cb4-c356-4108-9600-fdc55276def9" containerName="registry-server" Dec 03 19:42:54 crc kubenswrapper[4916]: I1203 19:42:54.983936 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn" Dec 03 19:42:54 crc kubenswrapper[4916]: I1203 19:42:54.986536 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-m5vcm" Dec 03 19:42:54 crc kubenswrapper[4916]: I1203 19:42:54.991144 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn"] Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.010682 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt"] Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.012123 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.014343 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.019812 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-gnlxq"] Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.020561 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-gnlxq" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.035416 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt"] Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.052265 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h7vn\" (UniqueName: \"kubernetes.io/projected/7603911b-7915-49df-afc6-e80da6dd90f2-kube-api-access-9h7vn\") pod \"nmstate-handler-gnlxq\" (UID: \"7603911b-7915-49df-afc6-e80da6dd90f2\") " pod="openshift-nmstate/nmstate-handler-gnlxq" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.052310 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5wzd\" (UniqueName: \"kubernetes.io/projected/1710047a-61c2-485d-ad6c-05691f102e43-kube-api-access-l5wzd\") pod \"nmstate-webhook-5f6d4c5ccb-tqmwt\" (UID: \"1710047a-61c2-485d-ad6c-05691f102e43\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.052336 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7603911b-7915-49df-afc6-e80da6dd90f2-ovs-socket\") pod \"nmstate-handler-gnlxq\" (UID: \"7603911b-7915-49df-afc6-e80da6dd90f2\") " pod="openshift-nmstate/nmstate-handler-gnlxq" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.052358 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvwdx\" (UniqueName: \"kubernetes.io/projected/a64200e6-4761-4d12-b787-7e0260253ffd-kube-api-access-wvwdx\") pod \"nmstate-metrics-7f946cbc9-nq5wn\" (UID: \"a64200e6-4761-4d12-b787-7e0260253ffd\") " 
pod="openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.052427 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1710047a-61c2-485d-ad6c-05691f102e43-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-tqmwt\" (UID: \"1710047a-61c2-485d-ad6c-05691f102e43\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.052463 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7603911b-7915-49df-afc6-e80da6dd90f2-nmstate-lock\") pod \"nmstate-handler-gnlxq\" (UID: \"7603911b-7915-49df-afc6-e80da6dd90f2\") " pod="openshift-nmstate/nmstate-handler-gnlxq" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.052484 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7603911b-7915-49df-afc6-e80da6dd90f2-dbus-socket\") pod \"nmstate-handler-gnlxq\" (UID: \"7603911b-7915-49df-afc6-e80da6dd90f2\") " pod="openshift-nmstate/nmstate-handler-gnlxq" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.120784 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d"] Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.121512 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.123536 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.123882 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-hjw6f" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.123966 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.128638 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d"] Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154242 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1710047a-61c2-485d-ad6c-05691f102e43-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-tqmwt\" (UID: \"1710047a-61c2-485d-ad6c-05691f102e43\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154282 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7603911b-7915-49df-afc6-e80da6dd90f2-nmstate-lock\") pod \"nmstate-handler-gnlxq\" (UID: \"7603911b-7915-49df-afc6-e80da6dd90f2\") " pod="openshift-nmstate/nmstate-handler-gnlxq" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154309 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7603911b-7915-49df-afc6-e80da6dd90f2-dbus-socket\") pod \"nmstate-handler-gnlxq\" (UID: \"7603911b-7915-49df-afc6-e80da6dd90f2\") " pod="openshift-nmstate/nmstate-handler-gnlxq" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 
Dec 03 19:42:55 crc kubenswrapper[4916]: E1203 19:42:55.154405 4916 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154405 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/7603911b-7915-49df-afc6-e80da6dd90f2-nmstate-lock\") pod \"nmstate-handler-gnlxq\" (UID: \"7603911b-7915-49df-afc6-e80da6dd90f2\") " pod="openshift-nmstate/nmstate-handler-gnlxq"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154411 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4l4s\" (UniqueName: \"kubernetes.io/projected/9ce1f525-9698-4837-8ad5-990c187fd275-kube-api-access-h4l4s\") pod \"nmstate-console-plugin-7fbb5f6569-gbx7d\" (UID: \"9ce1f525-9698-4837-8ad5-990c187fd275\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d"
Dec 03 19:42:55 crc kubenswrapper[4916]: E1203 19:42:55.154474 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1710047a-61c2-485d-ad6c-05691f102e43-tls-key-pair podName:1710047a-61c2-485d-ad6c-05691f102e43 nodeName:}" failed. No retries permitted until 2025-12-03 19:42:55.654451246 +0000 UTC m=+791.617261522 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/1710047a-61c2-485d-ad6c-05691f102e43-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-tqmwt" (UID: "1710047a-61c2-485d-ad6c-05691f102e43") : secret "openshift-nmstate-webhook" not found
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154639 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5wzd\" (UniqueName: \"kubernetes.io/projected/1710047a-61c2-485d-ad6c-05691f102e43-kube-api-access-l5wzd\") pod \"nmstate-webhook-5f6d4c5ccb-tqmwt\" (UID: \"1710047a-61c2-485d-ad6c-05691f102e43\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154690 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7603911b-7915-49df-afc6-e80da6dd90f2-ovs-socket\") pod \"nmstate-handler-gnlxq\" (UID: \"7603911b-7915-49df-afc6-e80da6dd90f2\") " pod="openshift-nmstate/nmstate-handler-gnlxq"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154710 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9ce1f525-9698-4837-8ad5-990c187fd275-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-gbx7d\" (UID: \"9ce1f525-9698-4837-8ad5-990c187fd275\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154730 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvwdx\" (UniqueName: \"kubernetes.io/projected/a64200e6-4761-4d12-b787-7e0260253ffd-kube-api-access-wvwdx\") pod \"nmstate-metrics-7f946cbc9-nq5wn\" (UID: \"a64200e6-4761-4d12-b787-7e0260253ffd\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154746 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/7603911b-7915-49df-afc6-e80da6dd90f2-dbus-socket\") pod \"nmstate-handler-gnlxq\" (UID: \"7603911b-7915-49df-afc6-e80da6dd90f2\") " pod="openshift-nmstate/nmstate-handler-gnlxq"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154754 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9ce1f525-9698-4837-8ad5-990c187fd275-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-gbx7d\" (UID: \"9ce1f525-9698-4837-8ad5-990c187fd275\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.154777 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/7603911b-7915-49df-afc6-e80da6dd90f2-ovs-socket\") pod \"nmstate-handler-gnlxq\" (UID: \"7603911b-7915-49df-afc6-e80da6dd90f2\") " pod="openshift-nmstate/nmstate-handler-gnlxq"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.175310 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvwdx\" (UniqueName: \"kubernetes.io/projected/a64200e6-4761-4d12-b787-7e0260253ffd-kube-api-access-wvwdx\") pod \"nmstate-metrics-7f946cbc9-nq5wn\" (UID: \"a64200e6-4761-4d12-b787-7e0260253ffd\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.175614 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5wzd\" (UniqueName: \"kubernetes.io/projected/1710047a-61c2-485d-ad6c-05691f102e43-kube-api-access-l5wzd\") pod \"nmstate-webhook-5f6d4c5ccb-tqmwt\" (UID: \"1710047a-61c2-485d-ad6c-05691f102e43\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.190111 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9h7vn\" (UniqueName: \"kubernetes.io/projected/7603911b-7915-49df-afc6-e80da6dd90f2-kube-api-access-9h7vn\") pod \"nmstate-handler-gnlxq\" (UID: \"7603911b-7915-49df-afc6-e80da6dd90f2\") " pod="openshift-nmstate/nmstate-handler-gnlxq"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.256624 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9ce1f525-9698-4837-8ad5-990c187fd275-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-gbx7d\" (UID: \"9ce1f525-9698-4837-8ad5-990c187fd275\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.256693 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9ce1f525-9698-4837-8ad5-990c187fd275-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-gbx7d\" (UID: \"9ce1f525-9698-4837-8ad5-990c187fd275\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d"
Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.256782 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4l4s\" (UniqueName: \"kubernetes.io/projected/9ce1f525-9698-4837-8ad5-990c187fd275-kube-api-access-h4l4s\") pod \"nmstate-console-plugin-7fbb5f6569-gbx7d\" (UID: \"9ce1f525-9698-4837-8ad5-990c187fd275\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d"
\"kubernetes.io/projected/9ce1f525-9698-4837-8ad5-990c187fd275-kube-api-access-h4l4s\") pod \"nmstate-console-plugin-7fbb5f6569-gbx7d\" (UID: \"9ce1f525-9698-4837-8ad5-990c187fd275\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d" Dec 03 19:42:55 crc kubenswrapper[4916]: E1203 19:42:55.256841 4916 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 03 19:42:55 crc kubenswrapper[4916]: E1203 19:42:55.256936 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9ce1f525-9698-4837-8ad5-990c187fd275-plugin-serving-cert podName:9ce1f525-9698-4837-8ad5-990c187fd275 nodeName:}" failed. No retries permitted until 2025-12-03 19:42:55.756917166 +0000 UTC m=+791.719727422 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/9ce1f525-9698-4837-8ad5-990c187fd275-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-gbx7d" (UID: "9ce1f525-9698-4837-8ad5-990c187fd275") : secret "plugin-serving-cert" not found Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.257755 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/9ce1f525-9698-4837-8ad5-990c187fd275-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-gbx7d\" (UID: \"9ce1f525-9698-4837-8ad5-990c187fd275\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.291432 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4l4s\" (UniqueName: \"kubernetes.io/projected/9ce1f525-9698-4837-8ad5-990c187fd275-kube-api-access-h4l4s\") pod \"nmstate-console-plugin-7fbb5f6569-gbx7d\" (UID: \"9ce1f525-9698-4837-8ad5-990c187fd275\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.308164 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.330815 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6589bd55cf-lx7qn"] Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.331424 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.349287 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6589bd55cf-lx7qn"] Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.358098 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/37b88403-6084-49a9-a592-77e5a44048b7-console-serving-cert\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.358141 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-trusted-ca-bundle\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.358162 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-console-config\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.358209 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5mt6t\" (UniqueName: \"kubernetes.io/projected/37b88403-6084-49a9-a592-77e5a44048b7-kube-api-access-5mt6t\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.358227 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-oauth-serving-cert\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.358276 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-service-ca\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.358293 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/37b88403-6084-49a9-a592-77e5a44048b7-console-oauth-config\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.361244 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-gnlxq" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.459780 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-service-ca\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.461134 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/37b88403-6084-49a9-a592-77e5a44048b7-console-oauth-config\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.461063 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-service-ca\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.461222 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/37b88403-6084-49a9-a592-77e5a44048b7-console-serving-cert\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.461781 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-trusted-ca-bundle\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.461809 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-console-config\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.462104 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5mt6t\" (UniqueName: \"kubernetes.io/projected/37b88403-6084-49a9-a592-77e5a44048b7-kube-api-access-5mt6t\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.462124 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-oauth-serving-cert\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.462684 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-oauth-serving-cert\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " 
pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.463462 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-console-config\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.463496 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/37b88403-6084-49a9-a592-77e5a44048b7-trusted-ca-bundle\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.468946 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/37b88403-6084-49a9-a592-77e5a44048b7-console-oauth-config\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.469520 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/37b88403-6084-49a9-a592-77e5a44048b7-console-serving-cert\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.480885 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5mt6t\" (UniqueName: \"kubernetes.io/projected/37b88403-6084-49a9-a592-77e5a44048b7-kube-api-access-5mt6t\") pod \"console-6589bd55cf-lx7qn\" (UID: \"37b88403-6084-49a9-a592-77e5a44048b7\") " pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.665270 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1710047a-61c2-485d-ad6c-05691f102e43-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-tqmwt\" (UID: \"1710047a-61c2-485d-ad6c-05691f102e43\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.670860 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1710047a-61c2-485d-ad6c-05691f102e43-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-tqmwt\" (UID: \"1710047a-61c2-485d-ad6c-05691f102e43\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.672970 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.758995 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn"] Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.766284 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9ce1f525-9698-4837-8ad5-990c187fd275-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-gbx7d\" (UID: \"9ce1f525-9698-4837-8ad5-990c187fd275\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d" Dec 03 19:42:55 crc kubenswrapper[4916]: W1203 19:42:55.766515 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda64200e6_4761_4d12_b787_7e0260253ffd.slice/crio-412603fe90f9673b20a03ebb1ed6f04ff2a9e1871edb4d1e1a500c8832993ceb WatchSource:0}: Error finding container 412603fe90f9673b20a03ebb1ed6f04ff2a9e1871edb4d1e1a500c8832993ceb: Status 404 returned error can't find the container with id 412603fe90f9673b20a03ebb1ed6f04ff2a9e1871edb4d1e1a500c8832993ceb Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.771300 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/9ce1f525-9698-4837-8ad5-990c187fd275-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-gbx7d\" (UID: \"9ce1f525-9698-4837-8ad5-990c187fd275\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d" Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.911431 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn" event={"ID":"a64200e6-4761-4d12-b787-7e0260253ffd","Type":"ContainerStarted","Data":"412603fe90f9673b20a03ebb1ed6f04ff2a9e1871edb4d1e1a500c8832993ceb"} Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.913133 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-gnlxq" event={"ID":"7603911b-7915-49df-afc6-e80da6dd90f2","Type":"ContainerStarted","Data":"83264fbb2dbd35a5cdeb4b0c950414751eff0006aaaee9d5eeb40a899c141b50"} Dec 03 19:42:55 crc kubenswrapper[4916]: I1203 19:42:55.951872 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" Dec 03 19:42:56 crc kubenswrapper[4916]: I1203 19:42:56.039409 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d" Dec 03 19:42:56 crc kubenswrapper[4916]: I1203 19:42:56.162805 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6589bd55cf-lx7qn"] Dec 03 19:42:56 crc kubenswrapper[4916]: W1203 19:42:56.177716 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37b88403_6084_49a9_a592_77e5a44048b7.slice/crio-4fad2851dd596120ed3e9ad4682275bdeb059996ae1705008caadbeb4126be99 WatchSource:0}: Error finding container 4fad2851dd596120ed3e9ad4682275bdeb059996ae1705008caadbeb4126be99: Status 404 returned error can't find the container with id 4fad2851dd596120ed3e9ad4682275bdeb059996ae1705008caadbeb4126be99 Dec 03 19:42:56 crc kubenswrapper[4916]: I1203 19:42:56.200524 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt"] Dec 03 19:42:56 crc kubenswrapper[4916]: I1203 19:42:56.275530 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d"] Dec 03 19:42:56 crc kubenswrapper[4916]: W1203 19:42:56.284130 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ce1f525_9698_4837_8ad5_990c187fd275.slice/crio-aad769fb9a0b4f5e4629d463cc02ab72c24f661c2625be189e3afbbf1cdfc86a WatchSource:0}: Error finding container aad769fb9a0b4f5e4629d463cc02ab72c24f661c2625be189e3afbbf1cdfc86a: Status 404 returned error can't find the container with id aad769fb9a0b4f5e4629d463cc02ab72c24f661c2625be189e3afbbf1cdfc86a Dec 03 19:42:56 crc kubenswrapper[4916]: I1203 19:42:56.920668 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6589bd55cf-lx7qn" event={"ID":"37b88403-6084-49a9-a592-77e5a44048b7","Type":"ContainerStarted","Data":"efcf09373f1f753f2c1163d1c2746094c290b0e5db7c29166b19f5875791c2b0"} Dec 03 19:42:56 crc kubenswrapper[4916]: I1203 19:42:56.920718 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6589bd55cf-lx7qn" event={"ID":"37b88403-6084-49a9-a592-77e5a44048b7","Type":"ContainerStarted","Data":"4fad2851dd596120ed3e9ad4682275bdeb059996ae1705008caadbeb4126be99"} Dec 03 19:42:56 crc kubenswrapper[4916]: I1203 19:42:56.922921 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d" event={"ID":"9ce1f525-9698-4837-8ad5-990c187fd275","Type":"ContainerStarted","Data":"aad769fb9a0b4f5e4629d463cc02ab72c24f661c2625be189e3afbbf1cdfc86a"} Dec 03 19:42:56 crc kubenswrapper[4916]: I1203 19:42:56.924761 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" event={"ID":"1710047a-61c2-485d-ad6c-05691f102e43","Type":"ContainerStarted","Data":"1ddeb893d1676c8d7372ad279a2d1b19b4907c66a742c2da055ea536eadc79af"} Dec 03 19:42:56 crc kubenswrapper[4916]: I1203 19:42:56.943505 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6589bd55cf-lx7qn" podStartSLOduration=1.943488681 podStartE2EDuration="1.943488681s" podCreationTimestamp="2025-12-03 19:42:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:42:56.93609302 +0000 UTC m=+792.898903326" watchObservedRunningTime="2025-12-03 19:42:56.943488681 +0000 UTC 
m=+792.906298947" Dec 03 19:42:57 crc kubenswrapper[4916]: I1203 19:42:57.939082 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" event={"ID":"1710047a-61c2-485d-ad6c-05691f102e43","Type":"ContainerStarted","Data":"cad3e4cdc03c394518b968f8cb6a1be49379bc09e4ea7a5b8447855974f2695a"} Dec 03 19:42:57 crc kubenswrapper[4916]: I1203 19:42:57.939610 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" Dec 03 19:42:57 crc kubenswrapper[4916]: I1203 19:42:57.941703 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn" event={"ID":"a64200e6-4761-4d12-b787-7e0260253ffd","Type":"ContainerStarted","Data":"4ef6730cb6cbc8865454e451ddcd3d20c5315e518f0d14be2b0beae4e6053859"} Dec 03 19:42:57 crc kubenswrapper[4916]: I1203 19:42:57.944297 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-gnlxq" event={"ID":"7603911b-7915-49df-afc6-e80da6dd90f2","Type":"ContainerStarted","Data":"6c056b229caabfd4445e663295d3f67cdc152704fa751d0217be9437389b8395"} Dec 03 19:42:57 crc kubenswrapper[4916]: I1203 19:42:57.944361 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-gnlxq" Dec 03 19:42:57 crc kubenswrapper[4916]: I1203 19:42:57.961609 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" podStartSLOduration=2.676512195 podStartE2EDuration="3.961592525s" podCreationTimestamp="2025-12-03 19:42:54 +0000 UTC" firstStartedPulling="2025-12-03 19:42:56.211895879 +0000 UTC m=+792.174706155" lastFinishedPulling="2025-12-03 19:42:57.496976209 +0000 UTC m=+793.459786485" observedRunningTime="2025-12-03 19:42:57.955738527 +0000 UTC m=+793.918548813" watchObservedRunningTime="2025-12-03 19:42:57.961592525 +0000 UTC m=+793.924402791" Dec 03 19:42:58 crc kubenswrapper[4916]: I1203 19:42:58.014447 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-gnlxq" podStartSLOduration=1.916133063 podStartE2EDuration="4.014433009s" podCreationTimestamp="2025-12-03 19:42:54 +0000 UTC" firstStartedPulling="2025-12-03 19:42:55.390523262 +0000 UTC m=+791.353333528" lastFinishedPulling="2025-12-03 19:42:57.488823208 +0000 UTC m=+793.451633474" observedRunningTime="2025-12-03 19:42:58.011317525 +0000 UTC m=+793.974127811" watchObservedRunningTime="2025-12-03 19:42:58.014433009 +0000 UTC m=+793.977243285" Dec 03 19:42:58 crc kubenswrapper[4916]: I1203 19:42:58.952625 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d" event={"ID":"9ce1f525-9698-4837-8ad5-990c187fd275","Type":"ContainerStarted","Data":"f3eef33cc63e3155e1793bc3b1b20b5ae5408b8fd3a2b711ba73cb7bb813b3e8"} Dec 03 19:42:58 crc kubenswrapper[4916]: I1203 19:42:58.984482 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-gbx7d" podStartSLOduration=1.705841102 podStartE2EDuration="3.98445669s" podCreationTimestamp="2025-12-03 19:42:55 +0000 UTC" firstStartedPulling="2025-12-03 19:42:56.286141034 +0000 UTC m=+792.248951310" lastFinishedPulling="2025-12-03 19:42:58.564756632 +0000 UTC m=+794.527566898" observedRunningTime="2025-12-03 19:42:58.979553217 +0000 UTC m=+794.942363483" watchObservedRunningTime="2025-12-03 
19:42:58.98445669 +0000 UTC m=+794.947266996" Dec 03 19:42:59 crc kubenswrapper[4916]: I1203 19:42:59.972489 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn" event={"ID":"a64200e6-4761-4d12-b787-7e0260253ffd","Type":"ContainerStarted","Data":"79139b853456d7ebcf4bdc7100618a06721f4144286654f602b09310635c6cc2"} Dec 03 19:43:00 crc kubenswrapper[4916]: I1203 19:43:00.003276 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-nq5wn" podStartSLOduration=2.204322964 podStartE2EDuration="6.003259675s" podCreationTimestamp="2025-12-03 19:42:54 +0000 UTC" firstStartedPulling="2025-12-03 19:42:55.784380679 +0000 UTC m=+791.747190955" lastFinishedPulling="2025-12-03 19:42:59.5833174 +0000 UTC m=+795.546127666" observedRunningTime="2025-12-03 19:43:00.000269304 +0000 UTC m=+795.963079630" watchObservedRunningTime="2025-12-03 19:43:00.003259675 +0000 UTC m=+795.966069941" Dec 03 19:43:05 crc kubenswrapper[4916]: I1203 19:43:05.399738 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-gnlxq" Dec 03 19:43:05 crc kubenswrapper[4916]: I1203 19:43:05.673123 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:43:05 crc kubenswrapper[4916]: I1203 19:43:05.673212 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:43:05 crc kubenswrapper[4916]: I1203 19:43:05.681411 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:43:06 crc kubenswrapper[4916]: I1203 19:43:06.019510 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6589bd55cf-lx7qn" Dec 03 19:43:06 crc kubenswrapper[4916]: I1203 19:43:06.095956 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-cdztl"] Dec 03 19:43:15 crc kubenswrapper[4916]: I1203 19:43:15.962113 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-tqmwt" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.222745 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4"] Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.224461 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.227262 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.239061 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4"] Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.316298 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.316343 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkgmx\" (UniqueName: \"kubernetes.io/projected/e7a6167d-5055-41db-b447-c72af54b8f9b-kube-api-access-hkgmx\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.316526 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.418057 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.418274 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.418311 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkgmx\" (UniqueName: \"kubernetes.io/projected/e7a6167d-5055-41db-b447-c72af54b8f9b-kube-api-access-hkgmx\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.418691 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.419144 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.447280 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkgmx\" (UniqueName: \"kubernetes.io/projected/e7a6167d-5055-41db-b447-c72af54b8f9b-kube-api-access-hkgmx\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.540214 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:30 crc kubenswrapper[4916]: I1203 19:43:30.835335 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4"] Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.169043 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-cdztl" podUID="1d18bd86-a58f-451c-90c0-9fa9834c6d77" containerName="console" containerID="cri-o://870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5" gracePeriod=15 Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.202092 4916 generic.go:334] "Generic (PLEG): container finished" podID="e7a6167d-5055-41db-b447-c72af54b8f9b" containerID="380f9e3c89e659719494aa2d5e946505b70a9204171b5f02bf3e1bc660abbac5" exitCode=0 Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.202158 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" event={"ID":"e7a6167d-5055-41db-b447-c72af54b8f9b","Type":"ContainerDied","Data":"380f9e3c89e659719494aa2d5e946505b70a9204171b5f02bf3e1bc660abbac5"} Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.202200 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" event={"ID":"e7a6167d-5055-41db-b447-c72af54b8f9b","Type":"ContainerStarted","Data":"1e939eb7e265b023a2f3cd2443c1d724cbab9e4663cf2b62a44775882c0db88f"} Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.627995 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-cdztl_1d18bd86-a58f-451c-90c0-9fa9834c6d77/console/0.log" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.628088 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.636978 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-service-ca\") pod \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.637100 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-trusted-ca-bundle\") pod \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.637146 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsdnf\" (UniqueName: \"kubernetes.io/projected/1d18bd86-a58f-451c-90c0-9fa9834c6d77-kube-api-access-nsdnf\") pod \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.637177 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-config\") pod \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.637249 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-oauth-config\") pod \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.637283 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-oauth-serving-cert\") pod \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.637330 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-serving-cert\") pod \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\" (UID: \"1d18bd86-a58f-451c-90c0-9fa9834c6d77\") " Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.638335 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-service-ca" (OuterVolumeSpecName: "service-ca") pod "1d18bd86-a58f-451c-90c0-9fa9834c6d77" (UID: "1d18bd86-a58f-451c-90c0-9fa9834c6d77"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.638365 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1d18bd86-a58f-451c-90c0-9fa9834c6d77" (UID: "1d18bd86-a58f-451c-90c0-9fa9834c6d77"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.638441 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "1d18bd86-a58f-451c-90c0-9fa9834c6d77" (UID: "1d18bd86-a58f-451c-90c0-9fa9834c6d77"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.638984 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-config" (OuterVolumeSpecName: "console-config") pod "1d18bd86-a58f-451c-90c0-9fa9834c6d77" (UID: "1d18bd86-a58f-451c-90c0-9fa9834c6d77"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.650961 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "1d18bd86-a58f-451c-90c0-9fa9834c6d77" (UID: "1d18bd86-a58f-451c-90c0-9fa9834c6d77"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.651012 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d18bd86-a58f-451c-90c0-9fa9834c6d77-kube-api-access-nsdnf" (OuterVolumeSpecName: "kube-api-access-nsdnf") pod "1d18bd86-a58f-451c-90c0-9fa9834c6d77" (UID: "1d18bd86-a58f-451c-90c0-9fa9834c6d77"). InnerVolumeSpecName "kube-api-access-nsdnf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.655152 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "1d18bd86-a58f-451c-90c0-9fa9834c6d77" (UID: "1d18bd86-a58f-451c-90c0-9fa9834c6d77"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.738933 4916 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.738977 4916 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.738995 4916 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.739012 4916 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.739027 4916 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.739043 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsdnf\" (UniqueName: \"kubernetes.io/projected/1d18bd86-a58f-451c-90c0-9fa9834c6d77-kube-api-access-nsdnf\") on node \"crc\" DevicePath \"\"" Dec 03 19:43:31 crc kubenswrapper[4916]: I1203 19:43:31.739058 4916 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/1d18bd86-a58f-451c-90c0-9fa9834c6d77-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.211486 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" event={"ID":"e7a6167d-5055-41db-b447-c72af54b8f9b","Type":"ContainerStarted","Data":"f4c93bb4754e567c05840e8f0a401dd3f3566bea0d85bcf9db8ff990b6aff035"} Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.214412 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-cdztl_1d18bd86-a58f-451c-90c0-9fa9834c6d77/console/0.log" Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.214488 4916 generic.go:334] "Generic (PLEG): container finished" podID="1d18bd86-a58f-451c-90c0-9fa9834c6d77" containerID="870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5" exitCode=2 Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.214535 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cdztl" event={"ID":"1d18bd86-a58f-451c-90c0-9fa9834c6d77","Type":"ContainerDied","Data":"870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5"} Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.214638 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-cdztl" Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.214920 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-cdztl" event={"ID":"1d18bd86-a58f-451c-90c0-9fa9834c6d77","Type":"ContainerDied","Data":"c97b80f38f1baf4123412641cf871628ad32b1a15459c1155323663edb39c956"} Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.214969 4916 scope.go:117] "RemoveContainer" containerID="870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5" Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.264894 4916 scope.go:117] "RemoveContainer" containerID="870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5" Dec 03 19:43:32 crc kubenswrapper[4916]: E1203 19:43:32.265742 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5\": container with ID starting with 870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5 not found: ID does not exist" containerID="870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5" Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.265793 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5"} err="failed to get container status \"870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5\": rpc error: code = NotFound desc = could not find container \"870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5\": container with ID starting with 870dae8fa5c0f5259941e8c41c565845c5f5a232d4356cb973a0cedcd797f5e5 not found: ID does not exist" Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.268485 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-cdztl"] Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.272961 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-cdztl"] Dec 03 19:43:32 crc kubenswrapper[4916]: I1203 19:43:32.493171 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d18bd86-a58f-451c-90c0-9fa9834c6d77" path="/var/lib/kubelet/pods/1d18bd86-a58f-451c-90c0-9fa9834c6d77/volumes" Dec 03 19:43:33 crc kubenswrapper[4916]: I1203 19:43:33.224706 4916 generic.go:334] "Generic (PLEG): container finished" podID="e7a6167d-5055-41db-b447-c72af54b8f9b" containerID="f4c93bb4754e567c05840e8f0a401dd3f3566bea0d85bcf9db8ff990b6aff035" exitCode=0 Dec 03 19:43:33 crc kubenswrapper[4916]: I1203 19:43:33.224791 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" event={"ID":"e7a6167d-5055-41db-b447-c72af54b8f9b","Type":"ContainerDied","Data":"f4c93bb4754e567c05840e8f0a401dd3f3566bea0d85bcf9db8ff990b6aff035"} Dec 03 19:43:34 crc kubenswrapper[4916]: I1203 19:43:34.242817 4916 generic.go:334] "Generic (PLEG): container finished" podID="e7a6167d-5055-41db-b447-c72af54b8f9b" containerID="928a0c3cdf63a19543e7347925add83baafbdacbfe92ddde70b79a573d6d0796" exitCode=0 Dec 03 19:43:34 crc kubenswrapper[4916]: I1203 19:43:34.242888 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" 
event={"ID":"e7a6167d-5055-41db-b447-c72af54b8f9b","Type":"ContainerDied","Data":"928a0c3cdf63a19543e7347925add83baafbdacbfe92ddde70b79a573d6d0796"} Dec 03 19:43:35 crc kubenswrapper[4916]: I1203 19:43:35.592022 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:35 crc kubenswrapper[4916]: I1203 19:43:35.594143 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkgmx\" (UniqueName: \"kubernetes.io/projected/e7a6167d-5055-41db-b447-c72af54b8f9b-kube-api-access-hkgmx\") pod \"e7a6167d-5055-41db-b447-c72af54b8f9b\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " Dec 03 19:43:35 crc kubenswrapper[4916]: I1203 19:43:35.594248 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-bundle\") pod \"e7a6167d-5055-41db-b447-c72af54b8f9b\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " Dec 03 19:43:35 crc kubenswrapper[4916]: I1203 19:43:35.594459 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-util\") pod \"e7a6167d-5055-41db-b447-c72af54b8f9b\" (UID: \"e7a6167d-5055-41db-b447-c72af54b8f9b\") " Dec 03 19:43:35 crc kubenswrapper[4916]: I1203 19:43:35.596616 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-bundle" (OuterVolumeSpecName: "bundle") pod "e7a6167d-5055-41db-b447-c72af54b8f9b" (UID: "e7a6167d-5055-41db-b447-c72af54b8f9b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:43:35 crc kubenswrapper[4916]: I1203 19:43:35.603793 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7a6167d-5055-41db-b447-c72af54b8f9b-kube-api-access-hkgmx" (OuterVolumeSpecName: "kube-api-access-hkgmx") pod "e7a6167d-5055-41db-b447-c72af54b8f9b" (UID: "e7a6167d-5055-41db-b447-c72af54b8f9b"). InnerVolumeSpecName "kube-api-access-hkgmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:43:35 crc kubenswrapper[4916]: I1203 19:43:35.627339 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-util" (OuterVolumeSpecName: "util") pod "e7a6167d-5055-41db-b447-c72af54b8f9b" (UID: "e7a6167d-5055-41db-b447-c72af54b8f9b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:43:35 crc kubenswrapper[4916]: I1203 19:43:35.696548 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkgmx\" (UniqueName: \"kubernetes.io/projected/e7a6167d-5055-41db-b447-c72af54b8f9b-kube-api-access-hkgmx\") on node \"crc\" DevicePath \"\"" Dec 03 19:43:35 crc kubenswrapper[4916]: I1203 19:43:35.696633 4916 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:43:35 crc kubenswrapper[4916]: I1203 19:43:35.696656 4916 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e7a6167d-5055-41db-b447-c72af54b8f9b-util\") on node \"crc\" DevicePath \"\"" Dec 03 19:43:36 crc kubenswrapper[4916]: I1203 19:43:36.259356 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" event={"ID":"e7a6167d-5055-41db-b447-c72af54b8f9b","Type":"ContainerDied","Data":"1e939eb7e265b023a2f3cd2443c1d724cbab9e4663cf2b62a44775882c0db88f"} Dec 03 19:43:36 crc kubenswrapper[4916]: I1203 19:43:36.259405 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e939eb7e265b023a2f3cd2443c1d724cbab9e4663cf2b62a44775882c0db88f" Dec 03 19:43:36 crc kubenswrapper[4916]: I1203 19:43:36.259439 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.944505 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j"] Dec 03 19:43:44 crc kubenswrapper[4916]: E1203 19:43:44.946703 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d18bd86-a58f-451c-90c0-9fa9834c6d77" containerName="console" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.946814 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d18bd86-a58f-451c-90c0-9fa9834c6d77" containerName="console" Dec 03 19:43:44 crc kubenswrapper[4916]: E1203 19:43:44.946930 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a6167d-5055-41db-b447-c72af54b8f9b" containerName="util" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.947027 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a6167d-5055-41db-b447-c72af54b8f9b" containerName="util" Dec 03 19:43:44 crc kubenswrapper[4916]: E1203 19:43:44.947126 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a6167d-5055-41db-b447-c72af54b8f9b" containerName="extract" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.947217 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a6167d-5055-41db-b447-c72af54b8f9b" containerName="extract" Dec 03 19:43:44 crc kubenswrapper[4916]: E1203 19:43:44.947329 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7a6167d-5055-41db-b447-c72af54b8f9b" containerName="pull" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.947420 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7a6167d-5055-41db-b447-c72af54b8f9b" containerName="pull" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.947705 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7a6167d-5055-41db-b447-c72af54b8f9b" containerName="extract" Dec 03 
19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.947811 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d18bd86-a58f-451c-90c0-9fa9834c6d77" containerName="console" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.948577 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.951035 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.951732 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.952086 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.953030 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-7ghrt" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.963009 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 03 19:43:44 crc kubenswrapper[4916]: I1203 19:43:44.979378 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j"] Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.129928 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnxvh\" (UniqueName: \"kubernetes.io/projected/75320d0a-a179-4fd1-8e6c-46dd6a8e88b1-kube-api-access-mnxvh\") pod \"metallb-operator-controller-manager-f96f55954-vbv4j\" (UID: \"75320d0a-a179-4fd1-8e6c-46dd6a8e88b1\") " pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.129968 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/75320d0a-a179-4fd1-8e6c-46dd6a8e88b1-webhook-cert\") pod \"metallb-operator-controller-manager-f96f55954-vbv4j\" (UID: \"75320d0a-a179-4fd1-8e6c-46dd6a8e88b1\") " pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.130298 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75320d0a-a179-4fd1-8e6c-46dd6a8e88b1-apiservice-cert\") pod \"metallb-operator-controller-manager-f96f55954-vbv4j\" (UID: \"75320d0a-a179-4fd1-8e6c-46dd6a8e88b1\") " pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.231373 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75320d0a-a179-4fd1-8e6c-46dd6a8e88b1-apiservice-cert\") pod \"metallb-operator-controller-manager-f96f55954-vbv4j\" (UID: \"75320d0a-a179-4fd1-8e6c-46dd6a8e88b1\") " pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.231657 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/75320d0a-a179-4fd1-8e6c-46dd6a8e88b1-webhook-cert\") pod \"metallb-operator-controller-manager-f96f55954-vbv4j\" (UID: \"75320d0a-a179-4fd1-8e6c-46dd6a8e88b1\") " pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.231727 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnxvh\" (UniqueName: \"kubernetes.io/projected/75320d0a-a179-4fd1-8e6c-46dd6a8e88b1-kube-api-access-mnxvh\") pod \"metallb-operator-controller-manager-f96f55954-vbv4j\" (UID: \"75320d0a-a179-4fd1-8e6c-46dd6a8e88b1\") " pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.237827 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/75320d0a-a179-4fd1-8e6c-46dd6a8e88b1-webhook-cert\") pod \"metallb-operator-controller-manager-f96f55954-vbv4j\" (UID: \"75320d0a-a179-4fd1-8e6c-46dd6a8e88b1\") " pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.240661 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/75320d0a-a179-4fd1-8e6c-46dd6a8e88b1-apiservice-cert\") pod \"metallb-operator-controller-manager-f96f55954-vbv4j\" (UID: \"75320d0a-a179-4fd1-8e6c-46dd6a8e88b1\") " pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.253866 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnxvh\" (UniqueName: \"kubernetes.io/projected/75320d0a-a179-4fd1-8e6c-46dd6a8e88b1-kube-api-access-mnxvh\") pod \"metallb-operator-controller-manager-f96f55954-vbv4j\" (UID: \"75320d0a-a179-4fd1-8e6c-46dd6a8e88b1\") " pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.263709 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb"] Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.263872 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.264595 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.266739 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.267214 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-zp5rd" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.269197 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.286051 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb"] Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.434712 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b608b106-b706-4e13-9e78-6962d5346432-webhook-cert\") pod \"metallb-operator-webhook-server-6647df69ff-4xrgb\" (UID: \"b608b106-b706-4e13-9e78-6962d5346432\") " pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.435156 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b608b106-b706-4e13-9e78-6962d5346432-apiservice-cert\") pod \"metallb-operator-webhook-server-6647df69ff-4xrgb\" (UID: \"b608b106-b706-4e13-9e78-6962d5346432\") " pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.435185 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6fkm\" (UniqueName: \"kubernetes.io/projected/b608b106-b706-4e13-9e78-6962d5346432-kube-api-access-w6fkm\") pod \"metallb-operator-webhook-server-6647df69ff-4xrgb\" (UID: \"b608b106-b706-4e13-9e78-6962d5346432\") " pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.506996 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j"] Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.537244 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b608b106-b706-4e13-9e78-6962d5346432-apiservice-cert\") pod \"metallb-operator-webhook-server-6647df69ff-4xrgb\" (UID: \"b608b106-b706-4e13-9e78-6962d5346432\") " pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.537286 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6fkm\" (UniqueName: \"kubernetes.io/projected/b608b106-b706-4e13-9e78-6962d5346432-kube-api-access-w6fkm\") pod \"metallb-operator-webhook-server-6647df69ff-4xrgb\" (UID: \"b608b106-b706-4e13-9e78-6962d5346432\") " pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.537320 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b608b106-b706-4e13-9e78-6962d5346432-webhook-cert\") pod 
\"metallb-operator-webhook-server-6647df69ff-4xrgb\" (UID: \"b608b106-b706-4e13-9e78-6962d5346432\") " pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.542319 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b608b106-b706-4e13-9e78-6962d5346432-webhook-cert\") pod \"metallb-operator-webhook-server-6647df69ff-4xrgb\" (UID: \"b608b106-b706-4e13-9e78-6962d5346432\") " pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.542548 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b608b106-b706-4e13-9e78-6962d5346432-apiservice-cert\") pod \"metallb-operator-webhook-server-6647df69ff-4xrgb\" (UID: \"b608b106-b706-4e13-9e78-6962d5346432\") " pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.552721 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6fkm\" (UniqueName: \"kubernetes.io/projected/b608b106-b706-4e13-9e78-6962d5346432-kube-api-access-w6fkm\") pod \"metallb-operator-webhook-server-6647df69ff-4xrgb\" (UID: \"b608b106-b706-4e13-9e78-6962d5346432\") " pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:45 crc kubenswrapper[4916]: I1203 19:43:45.608928 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:46 crc kubenswrapper[4916]: I1203 19:43:46.044632 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb"] Dec 03 19:43:46 crc kubenswrapper[4916]: W1203 19:43:46.048805 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb608b106_b706_4e13_9e78_6962d5346432.slice/crio-a153a99b03ef2b6793d457b6f7464f91d09c03a92d9541a0653e579eede372d2 WatchSource:0}: Error finding container a153a99b03ef2b6793d457b6f7464f91d09c03a92d9541a0653e579eede372d2: Status 404 returned error can't find the container with id a153a99b03ef2b6793d457b6f7464f91d09c03a92d9541a0653e579eede372d2 Dec 03 19:43:46 crc kubenswrapper[4916]: I1203 19:43:46.329907 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" event={"ID":"b608b106-b706-4e13-9e78-6962d5346432","Type":"ContainerStarted","Data":"a153a99b03ef2b6793d457b6f7464f91d09c03a92d9541a0653e579eede372d2"} Dec 03 19:43:46 crc kubenswrapper[4916]: I1203 19:43:46.331252 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" event={"ID":"75320d0a-a179-4fd1-8e6c-46dd6a8e88b1","Type":"ContainerStarted","Data":"61b2e5d45c0f8e821580b554397d2ff234076c8d186dfc0a36aa6159d3695aef"} Dec 03 19:43:48 crc kubenswrapper[4916]: I1203 19:43:48.342464 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" event={"ID":"75320d0a-a179-4fd1-8e6c-46dd6a8e88b1","Type":"ContainerStarted","Data":"3118b8d29e9fd4b30688f2c5b3d04dd980121b56d815e4b0606d75a0de4ff83e"} Dec 03 19:43:48 crc kubenswrapper[4916]: I1203 19:43:48.342791 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:43:48 crc kubenswrapper[4916]: I1203 19:43:48.366252 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" podStartSLOduration=1.944716916 podStartE2EDuration="4.366237611s" podCreationTimestamp="2025-12-03 19:43:44 +0000 UTC" firstStartedPulling="2025-12-03 19:43:45.522838053 +0000 UTC m=+841.485648329" lastFinishedPulling="2025-12-03 19:43:47.944358748 +0000 UTC m=+843.907169024" observedRunningTime="2025-12-03 19:43:48.363972339 +0000 UTC m=+844.326782625" watchObservedRunningTime="2025-12-03 19:43:48.366237611 +0000 UTC m=+844.329047867" Dec 03 19:43:50 crc kubenswrapper[4916]: I1203 19:43:50.360376 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" event={"ID":"b608b106-b706-4e13-9e78-6962d5346432","Type":"ContainerStarted","Data":"20b0b630424e662ccdf5e464c1ad392c60d0fb3a6aabeb4e88c5ce019859a8e8"} Dec 03 19:43:50 crc kubenswrapper[4916]: I1203 19:43:50.360660 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:43:50 crc kubenswrapper[4916]: I1203 19:43:50.387377 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" podStartSLOduration=1.503650326 podStartE2EDuration="5.387358603s" podCreationTimestamp="2025-12-03 19:43:45 +0000 UTC" firstStartedPulling="2025-12-03 19:43:46.051927089 +0000 UTC m=+842.014737355" lastFinishedPulling="2025-12-03 19:43:49.935635356 +0000 UTC m=+845.898445632" observedRunningTime="2025-12-03 19:43:50.384892086 +0000 UTC m=+846.347702352" watchObservedRunningTime="2025-12-03 19:43:50.387358603 +0000 UTC m=+846.350168869" Dec 03 19:44:03 crc kubenswrapper[4916]: I1203 19:44:03.817797 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6kzd8"] Dec 03 19:44:03 crc kubenswrapper[4916]: I1203 19:44:03.821072 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:03 crc kubenswrapper[4916]: I1203 19:44:03.828031 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6kzd8"] Dec 03 19:44:03 crc kubenswrapper[4916]: I1203 19:44:03.918100 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-utilities\") pod \"community-operators-6kzd8\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:03 crc kubenswrapper[4916]: I1203 19:44:03.918417 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-catalog-content\") pod \"community-operators-6kzd8\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:03 crc kubenswrapper[4916]: I1203 19:44:03.918684 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9p7w\" (UniqueName: \"kubernetes.io/projected/74e54e30-ddaf-4a76-8f09-936209021a0a-kube-api-access-v9p7w\") pod \"community-operators-6kzd8\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:04 crc kubenswrapper[4916]: I1203 19:44:04.020260 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-utilities\") pod \"community-operators-6kzd8\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:04 crc kubenswrapper[4916]: I1203 19:44:04.020649 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-catalog-content\") pod \"community-operators-6kzd8\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:04 crc kubenswrapper[4916]: I1203 19:44:04.020701 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9p7w\" (UniqueName: \"kubernetes.io/projected/74e54e30-ddaf-4a76-8f09-936209021a0a-kube-api-access-v9p7w\") pod \"community-operators-6kzd8\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:04 crc kubenswrapper[4916]: I1203 19:44:04.021042 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-utilities\") pod \"community-operators-6kzd8\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:04 crc kubenswrapper[4916]: I1203 19:44:04.021183 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-catalog-content\") pod \"community-operators-6kzd8\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:04 crc kubenswrapper[4916]: I1203 19:44:04.048920 4916 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-v9p7w\" (UniqueName: \"kubernetes.io/projected/74e54e30-ddaf-4a76-8f09-936209021a0a-kube-api-access-v9p7w\") pod \"community-operators-6kzd8\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:04 crc kubenswrapper[4916]: I1203 19:44:04.195240 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:04 crc kubenswrapper[4916]: I1203 19:44:04.505102 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6kzd8"] Dec 03 19:44:05 crc kubenswrapper[4916]: I1203 19:44:05.465153 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kzd8" event={"ID":"74e54e30-ddaf-4a76-8f09-936209021a0a","Type":"ContainerStarted","Data":"6a6b934897f27f5656332d1df88a19d2538851f217553b9fa3009ad3e7d3270a"} Dec 03 19:44:05 crc kubenswrapper[4916]: I1203 19:44:05.465627 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kzd8" event={"ID":"74e54e30-ddaf-4a76-8f09-936209021a0a","Type":"ContainerStarted","Data":"3755027985a4ef8dbb77d62f3eebe3ffbd8e0799fe749894e93097e59c7142c8"} Dec 03 19:44:05 crc kubenswrapper[4916]: I1203 19:44:05.615740 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6647df69ff-4xrgb" Dec 03 19:44:06 crc kubenswrapper[4916]: I1203 19:44:06.474169 4916 generic.go:334] "Generic (PLEG): container finished" podID="74e54e30-ddaf-4a76-8f09-936209021a0a" containerID="6a6b934897f27f5656332d1df88a19d2538851f217553b9fa3009ad3e7d3270a" exitCode=0 Dec 03 19:44:06 crc kubenswrapper[4916]: I1203 19:44:06.474278 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kzd8" event={"ID":"74e54e30-ddaf-4a76-8f09-936209021a0a","Type":"ContainerDied","Data":"6a6b934897f27f5656332d1df88a19d2538851f217553b9fa3009ad3e7d3270a"} Dec 03 19:44:07 crc kubenswrapper[4916]: I1203 19:44:07.482053 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kzd8" event={"ID":"74e54e30-ddaf-4a76-8f09-936209021a0a","Type":"ContainerStarted","Data":"520e59436123b9cb70060e27f121abb3daea3312a0ca0cbd1b6ffc7eafccc328"} Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.396424 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gzgss"] Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.398184 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.414533 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gzgss"] Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.482483 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-utilities\") pod \"redhat-marketplace-gzgss\" (UID: \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.482551 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-catalog-content\") pod \"redhat-marketplace-gzgss\" (UID: \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.482604 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wn5nq\" (UniqueName: \"kubernetes.io/projected/5f97372f-3800-474d-aab1-dfc0ee62fc4f-kube-api-access-wn5nq\") pod \"redhat-marketplace-gzgss\" (UID: \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.489793 4916 generic.go:334] "Generic (PLEG): container finished" podID="74e54e30-ddaf-4a76-8f09-936209021a0a" containerID="520e59436123b9cb70060e27f121abb3daea3312a0ca0cbd1b6ffc7eafccc328" exitCode=0 Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.489841 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kzd8" event={"ID":"74e54e30-ddaf-4a76-8f09-936209021a0a","Type":"ContainerDied","Data":"520e59436123b9cb70060e27f121abb3daea3312a0ca0cbd1b6ffc7eafccc328"} Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.583699 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-utilities\") pod \"redhat-marketplace-gzgss\" (UID: \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.583772 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-catalog-content\") pod \"redhat-marketplace-gzgss\" (UID: \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.583804 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wn5nq\" (UniqueName: \"kubernetes.io/projected/5f97372f-3800-474d-aab1-dfc0ee62fc4f-kube-api-access-wn5nq\") pod \"redhat-marketplace-gzgss\" (UID: \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.585081 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-utilities\") pod \"redhat-marketplace-gzgss\" (UID: 
\"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.585402 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-catalog-content\") pod \"redhat-marketplace-gzgss\" (UID: \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.615282 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wn5nq\" (UniqueName: \"kubernetes.io/projected/5f97372f-3800-474d-aab1-dfc0ee62fc4f-kube-api-access-wn5nq\") pod \"redhat-marketplace-gzgss\" (UID: \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.722461 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:08 crc kubenswrapper[4916]: I1203 19:44:08.937476 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gzgss"] Dec 03 19:44:08 crc kubenswrapper[4916]: W1203 19:44:08.949970 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f97372f_3800_474d_aab1_dfc0ee62fc4f.slice/crio-3ba0ce30baa550ff794ee597c36bc62751f680ca2b07c24e98cf45fbadfdcaba WatchSource:0}: Error finding container 3ba0ce30baa550ff794ee597c36bc62751f680ca2b07c24e98cf45fbadfdcaba: Status 404 returned error can't find the container with id 3ba0ce30baa550ff794ee597c36bc62751f680ca2b07c24e98cf45fbadfdcaba Dec 03 19:44:09 crc kubenswrapper[4916]: I1203 19:44:09.498489 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kzd8" event={"ID":"74e54e30-ddaf-4a76-8f09-936209021a0a","Type":"ContainerStarted","Data":"515a447c967bed73c84fe3697da7229859bc43c663569e3656fadcc0a56f34ca"} Dec 03 19:44:09 crc kubenswrapper[4916]: I1203 19:44:09.502439 4916 generic.go:334] "Generic (PLEG): container finished" podID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" containerID="9d70cab95443dc3033a12b9433f4a7903b7cdbf83c7b369270cce7cb993672ab" exitCode=0 Dec 03 19:44:09 crc kubenswrapper[4916]: I1203 19:44:09.502529 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzgss" event={"ID":"5f97372f-3800-474d-aab1-dfc0ee62fc4f","Type":"ContainerDied","Data":"9d70cab95443dc3033a12b9433f4a7903b7cdbf83c7b369270cce7cb993672ab"} Dec 03 19:44:09 crc kubenswrapper[4916]: I1203 19:44:09.502642 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzgss" event={"ID":"5f97372f-3800-474d-aab1-dfc0ee62fc4f","Type":"ContainerStarted","Data":"3ba0ce30baa550ff794ee597c36bc62751f680ca2b07c24e98cf45fbadfdcaba"} Dec 03 19:44:09 crc kubenswrapper[4916]: I1203 19:44:09.524309 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6kzd8" podStartSLOduration=4.108028355 podStartE2EDuration="6.524289154s" podCreationTimestamp="2025-12-03 19:44:03 +0000 UTC" firstStartedPulling="2025-12-03 19:44:06.477543624 +0000 UTC m=+862.440353920" lastFinishedPulling="2025-12-03 19:44:08.893804453 +0000 UTC m=+864.856614719" observedRunningTime="2025-12-03 19:44:09.523586425 +0000 UTC 
m=+865.486396691" watchObservedRunningTime="2025-12-03 19:44:09.524289154 +0000 UTC m=+865.487099420" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.201515 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nwc9w"] Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.203102 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.219047 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nwc9w"] Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.309335 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-utilities\") pod \"certified-operators-nwc9w\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.309606 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-catalog-content\") pod \"certified-operators-nwc9w\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.309647 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvdzs\" (UniqueName: \"kubernetes.io/projected/f1087e77-3755-41b6-8c3c-868a398d085f-kube-api-access-vvdzs\") pod \"certified-operators-nwc9w\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.411419 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-utilities\") pod \"certified-operators-nwc9w\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.411469 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-catalog-content\") pod \"certified-operators-nwc9w\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.411503 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvdzs\" (UniqueName: \"kubernetes.io/projected/f1087e77-3755-41b6-8c3c-868a398d085f-kube-api-access-vvdzs\") pod \"certified-operators-nwc9w\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.412023 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-utilities\") pod \"certified-operators-nwc9w\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.412130 4916 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-catalog-content\") pod \"certified-operators-nwc9w\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.450342 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvdzs\" (UniqueName: \"kubernetes.io/projected/f1087e77-3755-41b6-8c3c-868a398d085f-kube-api-access-vvdzs\") pod \"certified-operators-nwc9w\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.512753 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzgss" event={"ID":"5f97372f-3800-474d-aab1-dfc0ee62fc4f","Type":"ContainerStarted","Data":"d36d4d921c429dbf744004faaf37e0c0a77903794c8764e2fefdd5ab54a64196"} Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.527687 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:10 crc kubenswrapper[4916]: I1203 19:44:10.736947 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nwc9w"] Dec 03 19:44:10 crc kubenswrapper[4916]: W1203 19:44:10.738113 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1087e77_3755_41b6_8c3c_868a398d085f.slice/crio-f3815772b9980d83441fb5e4663bd988d13e14b89187ce5f894adace77313aba WatchSource:0}: Error finding container f3815772b9980d83441fb5e4663bd988d13e14b89187ce5f894adace77313aba: Status 404 returned error can't find the container with id f3815772b9980d83441fb5e4663bd988d13e14b89187ce5f894adace77313aba Dec 03 19:44:11 crc kubenswrapper[4916]: I1203 19:44:11.519750 4916 generic.go:334] "Generic (PLEG): container finished" podID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" containerID="d36d4d921c429dbf744004faaf37e0c0a77903794c8764e2fefdd5ab54a64196" exitCode=0 Dec 03 19:44:11 crc kubenswrapper[4916]: I1203 19:44:11.519803 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzgss" event={"ID":"5f97372f-3800-474d-aab1-dfc0ee62fc4f","Type":"ContainerDied","Data":"d36d4d921c429dbf744004faaf37e0c0a77903794c8764e2fefdd5ab54a64196"} Dec 03 19:44:11 crc kubenswrapper[4916]: I1203 19:44:11.523327 4916 generic.go:334] "Generic (PLEG): container finished" podID="f1087e77-3755-41b6-8c3c-868a398d085f" containerID="a3beb1d5540e65e596289ae48b849ec7188a0fca18445f95e23674f2f04e9a14" exitCode=0 Dec 03 19:44:11 crc kubenswrapper[4916]: I1203 19:44:11.523380 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwc9w" event={"ID":"f1087e77-3755-41b6-8c3c-868a398d085f","Type":"ContainerDied","Data":"a3beb1d5540e65e596289ae48b849ec7188a0fca18445f95e23674f2f04e9a14"} Dec 03 19:44:11 crc kubenswrapper[4916]: I1203 19:44:11.523468 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwc9w" event={"ID":"f1087e77-3755-41b6-8c3c-868a398d085f","Type":"ContainerStarted","Data":"f3815772b9980d83441fb5e4663bd988d13e14b89187ce5f894adace77313aba"} Dec 03 19:44:12 crc kubenswrapper[4916]: I1203 19:44:12.531656 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-gzgss" event={"ID":"5f97372f-3800-474d-aab1-dfc0ee62fc4f","Type":"ContainerStarted","Data":"f1bcd5ccbbc6f4596b6a057e0df5e757af3e10fe01defb745d818d738415ee74"} Dec 03 19:44:12 crc kubenswrapper[4916]: I1203 19:44:12.533642 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwc9w" event={"ID":"f1087e77-3755-41b6-8c3c-868a398d085f","Type":"ContainerStarted","Data":"6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0"} Dec 03 19:44:12 crc kubenswrapper[4916]: I1203 19:44:12.554157 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gzgss" podStartSLOduration=2.158784764 podStartE2EDuration="4.554131797s" podCreationTimestamp="2025-12-03 19:44:08 +0000 UTC" firstStartedPulling="2025-12-03 19:44:09.505043963 +0000 UTC m=+865.467854229" lastFinishedPulling="2025-12-03 19:44:11.900390956 +0000 UTC m=+867.863201262" observedRunningTime="2025-12-03 19:44:12.549681837 +0000 UTC m=+868.512492113" watchObservedRunningTime="2025-12-03 19:44:12.554131797 +0000 UTC m=+868.516942063" Dec 03 19:44:13 crc kubenswrapper[4916]: I1203 19:44:13.544161 4916 generic.go:334] "Generic (PLEG): container finished" podID="f1087e77-3755-41b6-8c3c-868a398d085f" containerID="6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0" exitCode=0 Dec 03 19:44:13 crc kubenswrapper[4916]: I1203 19:44:13.544279 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwc9w" event={"ID":"f1087e77-3755-41b6-8c3c-868a398d085f","Type":"ContainerDied","Data":"6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0"} Dec 03 19:44:14 crc kubenswrapper[4916]: I1203 19:44:14.195914 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:14 crc kubenswrapper[4916]: I1203 19:44:14.196368 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:14 crc kubenswrapper[4916]: I1203 19:44:14.263981 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:14 crc kubenswrapper[4916]: I1203 19:44:14.554751 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwc9w" event={"ID":"f1087e77-3755-41b6-8c3c-868a398d085f","Type":"ContainerStarted","Data":"661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423"} Dec 03 19:44:14 crc kubenswrapper[4916]: I1203 19:44:14.583511 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nwc9w" podStartSLOduration=2.138795604 podStartE2EDuration="4.583490191s" podCreationTimestamp="2025-12-03 19:44:10 +0000 UTC" firstStartedPulling="2025-12-03 19:44:11.526967977 +0000 UTC m=+867.489778283" lastFinishedPulling="2025-12-03 19:44:13.971662594 +0000 UTC m=+869.934472870" observedRunningTime="2025-12-03 19:44:14.58122413 +0000 UTC m=+870.544034436" watchObservedRunningTime="2025-12-03 19:44:14.583490191 +0000 UTC m=+870.546300497" Dec 03 19:44:14 crc kubenswrapper[4916]: I1203 19:44:14.641476 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:16 crc kubenswrapper[4916]: I1203 19:44:16.158469 4916 patch_prober.go:28] interesting 
pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:44:16 crc kubenswrapper[4916]: I1203 19:44:16.158990 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:44:16 crc kubenswrapper[4916]: I1203 19:44:16.786389 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6kzd8"] Dec 03 19:44:16 crc kubenswrapper[4916]: I1203 19:44:16.786675 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6kzd8" podUID="74e54e30-ddaf-4a76-8f09-936209021a0a" containerName="registry-server" containerID="cri-o://515a447c967bed73c84fe3697da7229859bc43c663569e3656fadcc0a56f34ca" gracePeriod=2 Dec 03 19:44:17 crc kubenswrapper[4916]: I1203 19:44:17.582109 4916 generic.go:334] "Generic (PLEG): container finished" podID="74e54e30-ddaf-4a76-8f09-936209021a0a" containerID="515a447c967bed73c84fe3697da7229859bc43c663569e3656fadcc0a56f34ca" exitCode=0 Dec 03 19:44:17 crc kubenswrapper[4916]: I1203 19:44:17.582185 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kzd8" event={"ID":"74e54e30-ddaf-4a76-8f09-936209021a0a","Type":"ContainerDied","Data":"515a447c967bed73c84fe3697da7229859bc43c663569e3656fadcc0a56f34ca"} Dec 03 19:44:17 crc kubenswrapper[4916]: I1203 19:44:17.726509 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:17 crc kubenswrapper[4916]: I1203 19:44:17.826629 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-utilities\") pod \"74e54e30-ddaf-4a76-8f09-936209021a0a\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " Dec 03 19:44:17 crc kubenswrapper[4916]: I1203 19:44:17.826695 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9p7w\" (UniqueName: \"kubernetes.io/projected/74e54e30-ddaf-4a76-8f09-936209021a0a-kube-api-access-v9p7w\") pod \"74e54e30-ddaf-4a76-8f09-936209021a0a\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " Dec 03 19:44:17 crc kubenswrapper[4916]: I1203 19:44:17.827017 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-catalog-content\") pod \"74e54e30-ddaf-4a76-8f09-936209021a0a\" (UID: \"74e54e30-ddaf-4a76-8f09-936209021a0a\") " Dec 03 19:44:17 crc kubenswrapper[4916]: I1203 19:44:17.828246 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-utilities" (OuterVolumeSpecName: "utilities") pod "74e54e30-ddaf-4a76-8f09-936209021a0a" (UID: "74e54e30-ddaf-4a76-8f09-936209021a0a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:44:17 crc kubenswrapper[4916]: I1203 19:44:17.841887 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74e54e30-ddaf-4a76-8f09-936209021a0a-kube-api-access-v9p7w" (OuterVolumeSpecName: "kube-api-access-v9p7w") pod "74e54e30-ddaf-4a76-8f09-936209021a0a" (UID: "74e54e30-ddaf-4a76-8f09-936209021a0a"). InnerVolumeSpecName "kube-api-access-v9p7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:44:17 crc kubenswrapper[4916]: I1203 19:44:17.928661 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:44:17 crc kubenswrapper[4916]: I1203 19:44:17.928713 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9p7w\" (UniqueName: \"kubernetes.io/projected/74e54e30-ddaf-4a76-8f09-936209021a0a-kube-api-access-v9p7w\") on node \"crc\" DevicePath \"\"" Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.166428 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "74e54e30-ddaf-4a76-8f09-936209021a0a" (UID: "74e54e30-ddaf-4a76-8f09-936209021a0a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.233185 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74e54e30-ddaf-4a76-8f09-936209021a0a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.723348 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.723432 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.761071 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6kzd8" event={"ID":"74e54e30-ddaf-4a76-8f09-936209021a0a","Type":"ContainerDied","Data":"3755027985a4ef8dbb77d62f3eebe3ffbd8e0799fe749894e93097e59c7142c8"} Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.761157 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6kzd8" Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.761637 4916 scope.go:117] "RemoveContainer" containerID="515a447c967bed73c84fe3697da7229859bc43c663569e3656fadcc0a56f34ca" Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.789252 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6kzd8"] Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.794162 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6kzd8"] Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.795393 4916 scope.go:117] "RemoveContainer" containerID="520e59436123b9cb70060e27f121abb3daea3312a0ca0cbd1b6ffc7eafccc328" Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.803773 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.812585 4916 scope.go:117] "RemoveContainer" containerID="6a6b934897f27f5656332d1df88a19d2538851f217553b9fa3009ad3e7d3270a" Dec 03 19:44:18 crc kubenswrapper[4916]: I1203 19:44:18.879346 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:20 crc kubenswrapper[4916]: I1203 19:44:20.492196 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74e54e30-ddaf-4a76-8f09-936209021a0a" path="/var/lib/kubelet/pods/74e54e30-ddaf-4a76-8f09-936209021a0a/volumes" Dec 03 19:44:20 crc kubenswrapper[4916]: I1203 19:44:20.529148 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:20 crc kubenswrapper[4916]: I1203 19:44:20.529241 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:20 crc kubenswrapper[4916]: I1203 19:44:20.574177 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:20 crc kubenswrapper[4916]: I1203 19:44:20.878948 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:21 crc kubenswrapper[4916]: I1203 19:44:21.187460 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gzgss"] Dec 03 19:44:21 crc kubenswrapper[4916]: I1203 19:44:21.187829 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gzgss" podUID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" containerName="registry-server" containerID="cri-o://f1bcd5ccbbc6f4596b6a057e0df5e757af3e10fe01defb745d818d738415ee74" gracePeriod=2 Dec 03 19:44:21 crc kubenswrapper[4916]: I1203 19:44:21.790316 4916 generic.go:334] "Generic (PLEG): container finished" podID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" containerID="f1bcd5ccbbc6f4596b6a057e0df5e757af3e10fe01defb745d818d738415ee74" exitCode=0 Dec 03 19:44:21 crc kubenswrapper[4916]: I1203 19:44:21.790390 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzgss" event={"ID":"5f97372f-3800-474d-aab1-dfc0ee62fc4f","Type":"ContainerDied","Data":"f1bcd5ccbbc6f4596b6a057e0df5e757af3e10fe01defb745d818d738415ee74"} Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.188227 4916 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.198215 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-utilities\") pod \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\" (UID: \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.198253 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-catalog-content\") pod \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\" (UID: \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.198307 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wn5nq\" (UniqueName: \"kubernetes.io/projected/5f97372f-3800-474d-aab1-dfc0ee62fc4f-kube-api-access-wn5nq\") pod \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\" (UID: \"5f97372f-3800-474d-aab1-dfc0ee62fc4f\") " Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.199125 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-utilities" (OuterVolumeSpecName: "utilities") pod "5f97372f-3800-474d-aab1-dfc0ee62fc4f" (UID: "5f97372f-3800-474d-aab1-dfc0ee62fc4f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.207553 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f97372f-3800-474d-aab1-dfc0ee62fc4f-kube-api-access-wn5nq" (OuterVolumeSpecName: "kube-api-access-wn5nq") pod "5f97372f-3800-474d-aab1-dfc0ee62fc4f" (UID: "5f97372f-3800-474d-aab1-dfc0ee62fc4f"). InnerVolumeSpecName "kube-api-access-wn5nq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.222816 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f97372f-3800-474d-aab1-dfc0ee62fc4f" (UID: "5f97372f-3800-474d-aab1-dfc0ee62fc4f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.299368 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.299409 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f97372f-3800-474d-aab1-dfc0ee62fc4f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.299426 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wn5nq\" (UniqueName: \"kubernetes.io/projected/5f97372f-3800-474d-aab1-dfc0ee62fc4f-kube-api-access-wn5nq\") on node \"crc\" DevicePath \"\"" Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.802842 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzgss" event={"ID":"5f97372f-3800-474d-aab1-dfc0ee62fc4f","Type":"ContainerDied","Data":"3ba0ce30baa550ff794ee597c36bc62751f680ca2b07c24e98cf45fbadfdcaba"} Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.802934 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gzgss" Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.803896 4916 scope.go:117] "RemoveContainer" containerID="f1bcd5ccbbc6f4596b6a057e0df5e757af3e10fe01defb745d818d738415ee74" Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.829830 4916 scope.go:117] "RemoveContainer" containerID="d36d4d921c429dbf744004faaf37e0c0a77903794c8764e2fefdd5ab54a64196" Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.837166 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gzgss"] Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.848131 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gzgss"] Dec 03 19:44:22 crc kubenswrapper[4916]: I1203 19:44:22.851759 4916 scope.go:117] "RemoveContainer" containerID="9d70cab95443dc3033a12b9433f4a7903b7cdbf83c7b369270cce7cb993672ab" Dec 03 19:44:23 crc kubenswrapper[4916]: I1203 19:44:23.587762 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nwc9w"] Dec 03 19:44:23 crc kubenswrapper[4916]: I1203 19:44:23.588618 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nwc9w" podUID="f1087e77-3755-41b6-8c3c-868a398d085f" containerName="registry-server" containerID="cri-o://661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423" gracePeriod=2 Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.495436 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" path="/var/lib/kubelet/pods/5f97372f-3800-474d-aab1-dfc0ee62fc4f/volumes" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.560681 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.731262 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvdzs\" (UniqueName: \"kubernetes.io/projected/f1087e77-3755-41b6-8c3c-868a398d085f-kube-api-access-vvdzs\") pod \"f1087e77-3755-41b6-8c3c-868a398d085f\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.731342 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-utilities\") pod \"f1087e77-3755-41b6-8c3c-868a398d085f\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.731422 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-catalog-content\") pod \"f1087e77-3755-41b6-8c3c-868a398d085f\" (UID: \"f1087e77-3755-41b6-8c3c-868a398d085f\") " Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.732778 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-utilities" (OuterVolumeSpecName: "utilities") pod "f1087e77-3755-41b6-8c3c-868a398d085f" (UID: "f1087e77-3755-41b6-8c3c-868a398d085f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.739766 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1087e77-3755-41b6-8c3c-868a398d085f-kube-api-access-vvdzs" (OuterVolumeSpecName: "kube-api-access-vvdzs") pod "f1087e77-3755-41b6-8c3c-868a398d085f" (UID: "f1087e77-3755-41b6-8c3c-868a398d085f"). InnerVolumeSpecName "kube-api-access-vvdzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.810735 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1087e77-3755-41b6-8c3c-868a398d085f" (UID: "f1087e77-3755-41b6-8c3c-868a398d085f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.825059 4916 generic.go:334] "Generic (PLEG): container finished" podID="f1087e77-3755-41b6-8c3c-868a398d085f" containerID="661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423" exitCode=0 Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.825131 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwc9w" event={"ID":"f1087e77-3755-41b6-8c3c-868a398d085f","Type":"ContainerDied","Data":"661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423"} Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.825205 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nwc9w" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.825241 4916 scope.go:117] "RemoveContainer" containerID="661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.825217 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nwc9w" event={"ID":"f1087e77-3755-41b6-8c3c-868a398d085f","Type":"ContainerDied","Data":"f3815772b9980d83441fb5e4663bd988d13e14b89187ce5f894adace77313aba"} Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.832755 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvdzs\" (UniqueName: \"kubernetes.io/projected/f1087e77-3755-41b6-8c3c-868a398d085f-kube-api-access-vvdzs\") on node \"crc\" DevicePath \"\"" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.832793 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.832804 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1087e77-3755-41b6-8c3c-868a398d085f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.849979 4916 scope.go:117] "RemoveContainer" containerID="6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.876287 4916 scope.go:117] "RemoveContainer" containerID="a3beb1d5540e65e596289ae48b849ec7188a0fca18445f95e23674f2f04e9a14" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.886739 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nwc9w"] Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.900595 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nwc9w"] Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.906438 4916 scope.go:117] "RemoveContainer" containerID="661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423" Dec 03 19:44:24 crc kubenswrapper[4916]: E1203 19:44:24.906812 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423\": container with ID starting with 661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423 not found: ID does not exist" containerID="661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.906844 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423"} err="failed to get container status \"661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423\": rpc error: code = NotFound desc = could not find container \"661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423\": container with ID starting with 661856d14d733fe65a3f1c24fc9c7b4c9448bed65853d307bd49d10cb4420423 not found: ID does not exist" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.906867 4916 scope.go:117] "RemoveContainer" containerID="6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0" Dec 03 19:44:24 crc kubenswrapper[4916]: 
E1203 19:44:24.907184 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0\": container with ID starting with 6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0 not found: ID does not exist" containerID="6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.907235 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0"} err="failed to get container status \"6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0\": rpc error: code = NotFound desc = could not find container \"6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0\": container with ID starting with 6846de39957a767088c69b22174ad01ae11377099302ff152af0fdad647f42a0 not found: ID does not exist" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.907248 4916 scope.go:117] "RemoveContainer" containerID="a3beb1d5540e65e596289ae48b849ec7188a0fca18445f95e23674f2f04e9a14" Dec 03 19:44:24 crc kubenswrapper[4916]: E1203 19:44:24.907560 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3beb1d5540e65e596289ae48b849ec7188a0fca18445f95e23674f2f04e9a14\": container with ID starting with a3beb1d5540e65e596289ae48b849ec7188a0fca18445f95e23674f2f04e9a14 not found: ID does not exist" containerID="a3beb1d5540e65e596289ae48b849ec7188a0fca18445f95e23674f2f04e9a14" Dec 03 19:44:24 crc kubenswrapper[4916]: I1203 19:44:24.907654 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3beb1d5540e65e596289ae48b849ec7188a0fca18445f95e23674f2f04e9a14"} err="failed to get container status \"a3beb1d5540e65e596289ae48b849ec7188a0fca18445f95e23674f2f04e9a14\": rpc error: code = NotFound desc = could not find container \"a3beb1d5540e65e596289ae48b849ec7188a0fca18445f95e23674f2f04e9a14\": container with ID starting with a3beb1d5540e65e596289ae48b849ec7188a0fca18445f95e23674f2f04e9a14 not found: ID does not exist" Dec 03 19:44:25 crc kubenswrapper[4916]: I1203 19:44:25.266861 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-f96f55954-vbv4j" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.275384 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv"] Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.276286 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" containerName="extract-utilities" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276307 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" containerName="extract-utilities" Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.276327 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" containerName="registry-server" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276338 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" containerName="registry-server" Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.276352 4916 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="f1087e77-3755-41b6-8c3c-868a398d085f" containerName="extract-utilities" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276363 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1087e77-3755-41b6-8c3c-868a398d085f" containerName="extract-utilities" Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.276384 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74e54e30-ddaf-4a76-8f09-936209021a0a" containerName="extract-content" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276395 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="74e54e30-ddaf-4a76-8f09-936209021a0a" containerName="extract-content" Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.276407 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1087e77-3755-41b6-8c3c-868a398d085f" containerName="registry-server" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276419 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1087e77-3755-41b6-8c3c-868a398d085f" containerName="registry-server" Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.276444 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" containerName="extract-content" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276458 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" containerName="extract-content" Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.276480 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1087e77-3755-41b6-8c3c-868a398d085f" containerName="extract-content" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276490 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1087e77-3755-41b6-8c3c-868a398d085f" containerName="extract-content" Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.276508 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74e54e30-ddaf-4a76-8f09-936209021a0a" containerName="extract-utilities" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276519 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="74e54e30-ddaf-4a76-8f09-936209021a0a" containerName="extract-utilities" Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.276532 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74e54e30-ddaf-4a76-8f09-936209021a0a" containerName="registry-server" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276543 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="74e54e30-ddaf-4a76-8f09-936209021a0a" containerName="registry-server" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276871 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="74e54e30-ddaf-4a76-8f09-936209021a0a" containerName="registry-server" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276911 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f97372f-3800-474d-aab1-dfc0ee62fc4f" containerName="registry-server" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.276929 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1087e77-3755-41b6-8c3c-868a398d085f" containerName="registry-server" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.277845 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.290214 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.290853 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-njwk9" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.308630 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-qkz6f"] Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.319427 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv"] Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.319776 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.325297 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.325520 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.369172 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-frr-conf\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.369424 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8a708d96-5e34-4479-83c4-90bfd3eb2e80-frr-startup\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.369612 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpr6k\" (UniqueName: \"kubernetes.io/projected/92d25015-4495-4c5a-a65d-e8027a8a1a00-kube-api-access-rpr6k\") pod \"frr-k8s-webhook-server-7fcb986d4-42zxv\" (UID: \"92d25015-4495-4c5a-a65d-e8027a8a1a00\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.369700 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a708d96-5e34-4479-83c4-90bfd3eb2e80-metrics-certs\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.369779 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-reloader\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.369856 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-metrics\") pod \"frr-k8s-qkz6f\" (UID: 
\"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.369921 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/92d25015-4495-4c5a-a65d-e8027a8a1a00-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-42zxv\" (UID: \"92d25015-4495-4c5a-a65d-e8027a8a1a00\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.369987 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-frr-sockets\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.370075 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8d8c\" (UniqueName: \"kubernetes.io/projected/8a708d96-5e34-4479-83c4-90bfd3eb2e80-kube-api-access-m8d8c\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.421848 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-nrs5t"] Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.422722 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-nrs5t" Dec 03 19:44:26 crc kubenswrapper[4916]: W1203 19:44:26.426915 4916 reflector.go:561] object-"metallb-system"/"metallb-excludel2": failed to list *v1.ConfigMap: configmaps "metallb-excludel2" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "metallb-system": no relationship found between node 'crc' and this object Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.426953 4916 reflector.go:158] "Unhandled Error" err="object-\"metallb-system\"/\"metallb-excludel2\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"metallb-excludel2\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"metallb-system\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 19:44:26 crc kubenswrapper[4916]: W1203 19:44:26.426954 4916 reflector.go:561] object-"metallb-system"/"speaker-certs-secret": failed to list *v1.Secret: secrets "speaker-certs-secret" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "metallb-system": no relationship found between node 'crc' and this object Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.426994 4916 reflector.go:158] "Unhandled Error" err="object-\"metallb-system\"/\"speaker-certs-secret\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"speaker-certs-secret\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"metallb-system\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.427577 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-h9kpd" Dec 03 19:44:26 crc kubenswrapper[4916]: W1203 19:44:26.434947 4916 reflector.go:561] object-"metallb-system"/"metallb-memberlist": 
failed to list *v1.Secret: secrets "metallb-memberlist" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "metallb-system": no relationship found between node 'crc' and this object Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.435121 4916 reflector.go:158] "Unhandled Error" err="object-\"metallb-system\"/\"metallb-memberlist\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"metallb-memberlist\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"metallb-system\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.435797 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-zcqns"] Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.436878 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.439715 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.448150 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-zcqns"] Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473421 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-metrics\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473460 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/92d25015-4495-4c5a-a65d-e8027a8a1a00-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-42zxv\" (UID: \"92d25015-4495-4c5a-a65d-e8027a8a1a00\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473483 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/4fefc703-1f37-4d7f-a4cd-54415e811abe-metallb-excludel2\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473503 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-frr-sockets\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473527 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8d8c\" (UniqueName: \"kubernetes.io/projected/8a708d96-5e34-4479-83c4-90bfd3eb2e80-kube-api-access-m8d8c\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473546 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-metrics-certs\") pod \"speaker-nrs5t\" (UID: 
\"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473586 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5q4b\" (UniqueName: \"kubernetes.io/projected/4fefc703-1f37-4d7f-a4cd-54415e811abe-kube-api-access-b5q4b\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473604 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca08d810-a5ec-4683-b666-4460bbaed1a1-cert\") pod \"controller-f8648f98b-zcqns\" (UID: \"ca08d810-a5ec-4683-b666-4460bbaed1a1\") " pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473617 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ca08d810-a5ec-4683-b666-4460bbaed1a1-metrics-certs\") pod \"controller-f8648f98b-zcqns\" (UID: \"ca08d810-a5ec-4683-b666-4460bbaed1a1\") " pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473636 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-frr-conf\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473655 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8a708d96-5e34-4479-83c4-90bfd3eb2e80-frr-startup\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473674 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpr6k\" (UniqueName: \"kubernetes.io/projected/92d25015-4495-4c5a-a65d-e8027a8a1a00-kube-api-access-rpr6k\") pod \"frr-k8s-webhook-server-7fcb986d4-42zxv\" (UID: \"92d25015-4495-4c5a-a65d-e8027a8a1a00\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473689 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-memberlist\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473716 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a708d96-5e34-4479-83c4-90bfd3eb2e80-metrics-certs\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473737 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-reloader\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.473752 4916 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdn64\" (UniqueName: \"kubernetes.io/projected/ca08d810-a5ec-4683-b666-4460bbaed1a1-kube-api-access-vdn64\") pod \"controller-f8648f98b-zcqns\" (UID: \"ca08d810-a5ec-4683-b666-4460bbaed1a1\") " pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.474394 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-metrics\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.475008 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-frr-conf\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.475095 4916 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 03 19:44:26 crc kubenswrapper[4916]: E1203 19:44:26.475140 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8a708d96-5e34-4479-83c4-90bfd3eb2e80-metrics-certs podName:8a708d96-5e34-4479-83c4-90bfd3eb2e80 nodeName:}" failed. No retries permitted until 2025-12-03 19:44:26.975120381 +0000 UTC m=+882.937930647 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8a708d96-5e34-4479-83c4-90bfd3eb2e80-metrics-certs") pod "frr-k8s-qkz6f" (UID: "8a708d96-5e34-4479-83c4-90bfd3eb2e80") : secret "frr-k8s-certs-secret" not found Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.475481 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-reloader\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.475789 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8a708d96-5e34-4479-83c4-90bfd3eb2e80-frr-startup\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.476288 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8a708d96-5e34-4479-83c4-90bfd3eb2e80-frr-sockets\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.482755 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/92d25015-4495-4c5a-a65d-e8027a8a1a00-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-42zxv\" (UID: \"92d25015-4495-4c5a-a65d-e8027a8a1a00\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.485476 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1087e77-3755-41b6-8c3c-868a398d085f" path="/var/lib/kubelet/pods/f1087e77-3755-41b6-8c3c-868a398d085f/volumes" Dec 03 19:44:26 crc 
kubenswrapper[4916]: I1203 19:44:26.498962 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpr6k\" (UniqueName: \"kubernetes.io/projected/92d25015-4495-4c5a-a65d-e8027a8a1a00-kube-api-access-rpr6k\") pod \"frr-k8s-webhook-server-7fcb986d4-42zxv\" (UID: \"92d25015-4495-4c5a-a65d-e8027a8a1a00\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.499969 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8d8c\" (UniqueName: \"kubernetes.io/projected/8a708d96-5e34-4479-83c4-90bfd3eb2e80-kube-api-access-m8d8c\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.575414 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5q4b\" (UniqueName: \"kubernetes.io/projected/4fefc703-1f37-4d7f-a4cd-54415e811abe-kube-api-access-b5q4b\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.575476 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca08d810-a5ec-4683-b666-4460bbaed1a1-cert\") pod \"controller-f8648f98b-zcqns\" (UID: \"ca08d810-a5ec-4683-b666-4460bbaed1a1\") " pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.575499 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ca08d810-a5ec-4683-b666-4460bbaed1a1-metrics-certs\") pod \"controller-f8648f98b-zcqns\" (UID: \"ca08d810-a5ec-4683-b666-4460bbaed1a1\") " pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.575554 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-memberlist\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.575643 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdn64\" (UniqueName: \"kubernetes.io/projected/ca08d810-a5ec-4683-b666-4460bbaed1a1-kube-api-access-vdn64\") pod \"controller-f8648f98b-zcqns\" (UID: \"ca08d810-a5ec-4683-b666-4460bbaed1a1\") " pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.575682 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/4fefc703-1f37-4d7f-a4cd-54415e811abe-metallb-excludel2\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.575756 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-metrics-certs\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.579270 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/ca08d810-a5ec-4683-b666-4460bbaed1a1-cert\") pod \"controller-f8648f98b-zcqns\" (UID: \"ca08d810-a5ec-4683-b666-4460bbaed1a1\") " pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.579589 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ca08d810-a5ec-4683-b666-4460bbaed1a1-metrics-certs\") pod \"controller-f8648f98b-zcqns\" (UID: \"ca08d810-a5ec-4683-b666-4460bbaed1a1\") " pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.595441 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5q4b\" (UniqueName: \"kubernetes.io/projected/4fefc703-1f37-4d7f-a4cd-54415e811abe-kube-api-access-b5q4b\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.596147 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdn64\" (UniqueName: \"kubernetes.io/projected/ca08d810-a5ec-4683-b666-4460bbaed1a1-kube-api-access-vdn64\") pod \"controller-f8648f98b-zcqns\" (UID: \"ca08d810-a5ec-4683-b666-4460bbaed1a1\") " pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.606014 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.750428 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.814861 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv"] Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.842739 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" event={"ID":"92d25015-4495-4c5a-a65d-e8027a8a1a00","Type":"ContainerStarted","Data":"3774c446d35d3613a91b6d9ec6a2f43b5ae28c54e91c46334cb2077d0c90f721"} Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.946276 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-zcqns"] Dec 03 19:44:26 crc kubenswrapper[4916]: W1203 19:44:26.952604 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca08d810_a5ec_4683_b666_4460bbaed1a1.slice/crio-6ad395aa9b05847937d265a8ede0091396894a470dd5121c4531a0162274b993 WatchSource:0}: Error finding container 6ad395aa9b05847937d265a8ede0091396894a470dd5121c4531a0162274b993: Status 404 returned error can't find the container with id 6ad395aa9b05847937d265a8ede0091396894a470dd5121c4531a0162274b993 Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.980499 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a708d96-5e34-4479-83c4-90bfd3eb2e80-metrics-certs\") pod \"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:26 crc kubenswrapper[4916]: I1203 19:44:26.986539 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8a708d96-5e34-4479-83c4-90bfd3eb2e80-metrics-certs\") pod 
\"frr-k8s-qkz6f\" (UID: \"8a708d96-5e34-4479-83c4-90bfd3eb2e80\") " pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:27 crc kubenswrapper[4916]: I1203 19:44:27.239209 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:27 crc kubenswrapper[4916]: E1203 19:44:27.577702 4916 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: failed to sync secret cache: timed out waiting for the condition Dec 03 19:44:27 crc kubenswrapper[4916]: E1203 19:44:27.578165 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-metrics-certs podName:4fefc703-1f37-4d7f-a4cd-54415e811abe nodeName:}" failed. No retries permitted until 2025-12-03 19:44:28.078135962 +0000 UTC m=+884.040946238 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-metrics-certs") pod "speaker-nrs5t" (UID: "4fefc703-1f37-4d7f-a4cd-54415e811abe") : failed to sync secret cache: timed out waiting for the condition Dec 03 19:44:27 crc kubenswrapper[4916]: E1203 19:44:27.577834 4916 configmap.go:193] Couldn't get configMap metallb-system/metallb-excludel2: failed to sync configmap cache: timed out waiting for the condition Dec 03 19:44:27 crc kubenswrapper[4916]: E1203 19:44:27.578490 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4fefc703-1f37-4d7f-a4cd-54415e811abe-metallb-excludel2 podName:4fefc703-1f37-4d7f-a4cd-54415e811abe nodeName:}" failed. No retries permitted until 2025-12-03 19:44:28.078474421 +0000 UTC m=+884.041284717 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metallb-excludel2" (UniqueName: "kubernetes.io/configmap/4fefc703-1f37-4d7f-a4cd-54415e811abe-metallb-excludel2") pod "speaker-nrs5t" (UID: "4fefc703-1f37-4d7f-a4cd-54415e811abe") : failed to sync configmap cache: timed out waiting for the condition Dec 03 19:44:27 crc kubenswrapper[4916]: E1203 19:44:27.577867 4916 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: failed to sync secret cache: timed out waiting for the condition Dec 03 19:44:27 crc kubenswrapper[4916]: E1203 19:44:27.578549 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-memberlist podName:4fefc703-1f37-4d7f-a4cd-54415e811abe nodeName:}" failed. No retries permitted until 2025-12-03 19:44:28.078536203 +0000 UTC m=+884.041346479 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-memberlist") pod "speaker-nrs5t" (UID: "4fefc703-1f37-4d7f-a4cd-54415e811abe") : failed to sync secret cache: timed out waiting for the condition Dec 03 19:44:27 crc kubenswrapper[4916]: I1203 19:44:27.802414 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 03 19:44:27 crc kubenswrapper[4916]: I1203 19:44:27.841659 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 03 19:44:27 crc kubenswrapper[4916]: I1203 19:44:27.868125 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-zcqns" event={"ID":"ca08d810-a5ec-4683-b666-4460bbaed1a1","Type":"ContainerStarted","Data":"c8254ed428c7c166c03a56d313ca6ddd7c004c760b0cf7e2e5a5d591c129a092"} Dec 03 19:44:27 crc kubenswrapper[4916]: I1203 19:44:27.868196 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-zcqns" event={"ID":"ca08d810-a5ec-4683-b666-4460bbaed1a1","Type":"ContainerStarted","Data":"ea0a8ca073ed0ef04b48aed5d0f1b3f1e9d632b7b2e651d9e0eb118ee8e5e943"} Dec 03 19:44:27 crc kubenswrapper[4916]: I1203 19:44:27.868224 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-zcqns" event={"ID":"ca08d810-a5ec-4683-b666-4460bbaed1a1","Type":"ContainerStarted","Data":"6ad395aa9b05847937d265a8ede0091396894a470dd5121c4531a0162274b993"} Dec 03 19:44:27 crc kubenswrapper[4916]: I1203 19:44:27.868844 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:27 crc kubenswrapper[4916]: I1203 19:44:27.874937 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkz6f" event={"ID":"8a708d96-5e34-4479-83c4-90bfd3eb2e80","Type":"ContainerStarted","Data":"d2bd989543b5d18e45063ce6995e63bd7ae2ad592555cafcb0e91728995aa985"} Dec 03 19:44:27 crc kubenswrapper[4916]: I1203 19:44:27.899354 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-zcqns" podStartSLOduration=1.899332539 podStartE2EDuration="1.899332539s" podCreationTimestamp="2025-12-03 19:44:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:44:27.895925277 +0000 UTC m=+883.858735563" watchObservedRunningTime="2025-12-03 19:44:27.899332539 +0000 UTC m=+883.862142815" Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.016873 4916 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.096232 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/4fefc703-1f37-4d7f-a4cd-54415e811abe-metallb-excludel2\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.096294 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-metrics-certs\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:28 crc 
kubenswrapper[4916]: I1203 19:44:28.096344 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-memberlist\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.097174 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/4fefc703-1f37-4d7f-a4cd-54415e811abe-metallb-excludel2\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.100437 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-metrics-certs\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.100863 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/4fefc703-1f37-4d7f-a4cd-54415e811abe-memberlist\") pod \"speaker-nrs5t\" (UID: \"4fefc703-1f37-4d7f-a4cd-54415e811abe\") " pod="metallb-system/speaker-nrs5t" Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.235221 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-nrs5t" Dec 03 19:44:28 crc kubenswrapper[4916]: W1203 19:44:28.271979 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4fefc703_1f37_4d7f_a4cd_54415e811abe.slice/crio-5d40adb10ec3461ae3d77c0aa9e22dc7a42dca63fa69843a1423242d752f46e6 WatchSource:0}: Error finding container 5d40adb10ec3461ae3d77c0aa9e22dc7a42dca63fa69843a1423242d752f46e6: Status 404 returned error can't find the container with id 5d40adb10ec3461ae3d77c0aa9e22dc7a42dca63fa69843a1423242d752f46e6 Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.882587 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nrs5t" event={"ID":"4fefc703-1f37-4d7f-a4cd-54415e811abe","Type":"ContainerStarted","Data":"a94912a5f105decc9725e8457b45a0e1a46e085f3bf207868273e12ce0fc6f53"} Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.882823 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nrs5t" event={"ID":"4fefc703-1f37-4d7f-a4cd-54415e811abe","Type":"ContainerStarted","Data":"6184b610fb2b41c775584ca5ce503dbfc9cf33e4e3b57d1e4139e87260844058"} Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.882835 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nrs5t" event={"ID":"4fefc703-1f37-4d7f-a4cd-54415e811abe","Type":"ContainerStarted","Data":"5d40adb10ec3461ae3d77c0aa9e22dc7a42dca63fa69843a1423242d752f46e6"} Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.883019 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-nrs5t" Dec 03 19:44:28 crc kubenswrapper[4916]: I1203 19:44:28.909239 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-nrs5t" podStartSLOduration=2.909222192 podStartE2EDuration="2.909222192s" podCreationTimestamp="2025-12-03 19:44:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:44:28.907407803 +0000 UTC m=+884.870218069" watchObservedRunningTime="2025-12-03 19:44:28.909222192 +0000 UTC m=+884.872032458" Dec 03 19:44:34 crc kubenswrapper[4916]: I1203 19:44:34.939899 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" event={"ID":"92d25015-4495-4c5a-a65d-e8027a8a1a00","Type":"ContainerStarted","Data":"6d89107e75d2c23045485ee8fa920177c83266f60e5f524b7bc4c24f2a6e2e22"} Dec 03 19:44:34 crc kubenswrapper[4916]: I1203 19:44:34.940682 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" Dec 03 19:44:34 crc kubenswrapper[4916]: I1203 19:44:34.944505 4916 generic.go:334] "Generic (PLEG): container finished" podID="8a708d96-5e34-4479-83c4-90bfd3eb2e80" containerID="ce74bde6f5de82827f35b1b18b2e07b4cb089d97ba766a45d97c4c63f003eb96" exitCode=0 Dec 03 19:44:34 crc kubenswrapper[4916]: I1203 19:44:34.944600 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkz6f" event={"ID":"8a708d96-5e34-4479-83c4-90bfd3eb2e80","Type":"ContainerDied","Data":"ce74bde6f5de82827f35b1b18b2e07b4cb089d97ba766a45d97c4c63f003eb96"} Dec 03 19:44:34 crc kubenswrapper[4916]: I1203 19:44:34.967167 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" podStartSLOduration=1.690397438 podStartE2EDuration="8.967144999s" podCreationTimestamp="2025-12-03 19:44:26 +0000 UTC" firstStartedPulling="2025-12-03 19:44:26.830510563 +0000 UTC m=+882.793320829" lastFinishedPulling="2025-12-03 19:44:34.107258084 +0000 UTC m=+890.070068390" observedRunningTime="2025-12-03 19:44:34.961927168 +0000 UTC m=+890.924737434" watchObservedRunningTime="2025-12-03 19:44:34.967144999 +0000 UTC m=+890.929955305" Dec 03 19:44:35 crc kubenswrapper[4916]: I1203 19:44:35.955689 4916 generic.go:334] "Generic (PLEG): container finished" podID="8a708d96-5e34-4479-83c4-90bfd3eb2e80" containerID="1f4cc4a1f578e78e205a9ddbf145920a9e3d821efa4134e9756b64479ab6945c" exitCode=0 Dec 03 19:44:35 crc kubenswrapper[4916]: I1203 19:44:35.955821 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkz6f" event={"ID":"8a708d96-5e34-4479-83c4-90bfd3eb2e80","Type":"ContainerDied","Data":"1f4cc4a1f578e78e205a9ddbf145920a9e3d821efa4134e9756b64479ab6945c"} Dec 03 19:44:36 crc kubenswrapper[4916]: I1203 19:44:36.965624 4916 generic.go:334] "Generic (PLEG): container finished" podID="8a708d96-5e34-4479-83c4-90bfd3eb2e80" containerID="229606c56d4529583371878e55d8499c14feeedfac5b974a086e86777ae0a25a" exitCode=0 Dec 03 19:44:36 crc kubenswrapper[4916]: I1203 19:44:36.965692 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkz6f" event={"ID":"8a708d96-5e34-4479-83c4-90bfd3eb2e80","Type":"ContainerDied","Data":"229606c56d4529583371878e55d8499c14feeedfac5b974a086e86777ae0a25a"} Dec 03 19:44:37 crc kubenswrapper[4916]: I1203 19:44:37.986590 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkz6f" event={"ID":"8a708d96-5e34-4479-83c4-90bfd3eb2e80","Type":"ContainerStarted","Data":"0a002738d211718f9dc9994206d6bdc7e191869bc3d894ef92a62d89caf1e2d8"} Dec 03 19:44:37 crc kubenswrapper[4916]: I1203 19:44:37.986640 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkz6f" 
event={"ID":"8a708d96-5e34-4479-83c4-90bfd3eb2e80","Type":"ContainerStarted","Data":"a8f09237a4111046e7638c7226b130c153614011a790f3b562c59a503877ef7a"} Dec 03 19:44:37 crc kubenswrapper[4916]: I1203 19:44:37.986654 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkz6f" event={"ID":"8a708d96-5e34-4479-83c4-90bfd3eb2e80","Type":"ContainerStarted","Data":"a8de7cabb56ea478d892ee580e9aec6f1554508c5e4e4de35d35dc62bec98f3a"} Dec 03 19:44:37 crc kubenswrapper[4916]: I1203 19:44:37.986666 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkz6f" event={"ID":"8a708d96-5e34-4479-83c4-90bfd3eb2e80","Type":"ContainerStarted","Data":"35a92b1d7e9db6c8546feca0fe05f2e1d49012371861ed13e9da908344ec29df"} Dec 03 19:44:37 crc kubenswrapper[4916]: I1203 19:44:37.986675 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkz6f" event={"ID":"8a708d96-5e34-4479-83c4-90bfd3eb2e80","Type":"ContainerStarted","Data":"f88b617fec0b4c0124faeff8677bdd8fc462ea53709bf6eca77b466b1217ca7f"} Dec 03 19:44:38 crc kubenswrapper[4916]: I1203 19:44:38.240862 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-nrs5t" Dec 03 19:44:39 crc kubenswrapper[4916]: I1203 19:44:39.002438 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-qkz6f" event={"ID":"8a708d96-5e34-4479-83c4-90bfd3eb2e80","Type":"ContainerStarted","Data":"73759a5f1e99f212ead98b8f6b8c8cd8a49f133f7d588e95dfa0fe56e9b8212f"} Dec 03 19:44:39 crc kubenswrapper[4916]: I1203 19:44:39.003112 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:39 crc kubenswrapper[4916]: I1203 19:44:39.037925 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-qkz6f" podStartSLOduration=6.309971725 podStartE2EDuration="13.037903734s" podCreationTimestamp="2025-12-03 19:44:26 +0000 UTC" firstStartedPulling="2025-12-03 19:44:27.349303373 +0000 UTC m=+883.312113639" lastFinishedPulling="2025-12-03 19:44:34.077235372 +0000 UTC m=+890.040045648" observedRunningTime="2025-12-03 19:44:39.034943494 +0000 UTC m=+894.997753810" watchObservedRunningTime="2025-12-03 19:44:39.037903734 +0000 UTC m=+895.000714000" Dec 03 19:44:41 crc kubenswrapper[4916]: I1203 19:44:41.267756 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-sjmmt"] Dec 03 19:44:41 crc kubenswrapper[4916]: I1203 19:44:41.270177 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-sjmmt" Dec 03 19:44:41 crc kubenswrapper[4916]: I1203 19:44:41.272800 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-77ls8" Dec 03 19:44:41 crc kubenswrapper[4916]: I1203 19:44:41.273804 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 03 19:44:41 crc kubenswrapper[4916]: I1203 19:44:41.277243 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 03 19:44:41 crc kubenswrapper[4916]: I1203 19:44:41.309631 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-sjmmt"] Dec 03 19:44:41 crc kubenswrapper[4916]: I1203 19:44:41.310158 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6skn\" (UniqueName: \"kubernetes.io/projected/054d0fac-68b1-4edc-877b-93b286a5c7f4-kube-api-access-r6skn\") pod \"openstack-operator-index-sjmmt\" (UID: \"054d0fac-68b1-4edc-877b-93b286a5c7f4\") " pod="openstack-operators/openstack-operator-index-sjmmt" Dec 03 19:44:41 crc kubenswrapper[4916]: I1203 19:44:41.411342 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6skn\" (UniqueName: \"kubernetes.io/projected/054d0fac-68b1-4edc-877b-93b286a5c7f4-kube-api-access-r6skn\") pod \"openstack-operator-index-sjmmt\" (UID: \"054d0fac-68b1-4edc-877b-93b286a5c7f4\") " pod="openstack-operators/openstack-operator-index-sjmmt" Dec 03 19:44:41 crc kubenswrapper[4916]: I1203 19:44:41.442163 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6skn\" (UniqueName: \"kubernetes.io/projected/054d0fac-68b1-4edc-877b-93b286a5c7f4-kube-api-access-r6skn\") pod \"openstack-operator-index-sjmmt\" (UID: \"054d0fac-68b1-4edc-877b-93b286a5c7f4\") " pod="openstack-operators/openstack-operator-index-sjmmt" Dec 03 19:44:41 crc kubenswrapper[4916]: I1203 19:44:41.604699 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-sjmmt" Dec 03 19:44:41 crc kubenswrapper[4916]: I1203 19:44:41.817296 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-sjmmt"] Dec 03 19:44:41 crc kubenswrapper[4916]: W1203 19:44:41.824612 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod054d0fac_68b1_4edc_877b_93b286a5c7f4.slice/crio-8605f6309c15daaf023b7fbb66d5585042d7b58c1608013db58be7a1140df9ff WatchSource:0}: Error finding container 8605f6309c15daaf023b7fbb66d5585042d7b58c1608013db58be7a1140df9ff: Status 404 returned error can't find the container with id 8605f6309c15daaf023b7fbb66d5585042d7b58c1608013db58be7a1140df9ff Dec 03 19:44:42 crc kubenswrapper[4916]: I1203 19:44:42.026609 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sjmmt" event={"ID":"054d0fac-68b1-4edc-877b-93b286a5c7f4","Type":"ContainerStarted","Data":"8605f6309c15daaf023b7fbb66d5585042d7b58c1608013db58be7a1140df9ff"} Dec 03 19:44:42 crc kubenswrapper[4916]: I1203 19:44:42.241363 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:42 crc kubenswrapper[4916]: I1203 19:44:42.283201 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:44 crc kubenswrapper[4916]: I1203 19:44:44.443015 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-sjmmt"] Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.047096 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-n7msw"] Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.048662 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-n7msw" Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.049801 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sjmmt" event={"ID":"054d0fac-68b1-4edc-877b-93b286a5c7f4","Type":"ContainerStarted","Data":"95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb"} Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.049928 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-sjmmt" podUID="054d0fac-68b1-4edc-877b-93b286a5c7f4" containerName="registry-server" containerID="cri-o://95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb" gracePeriod=2 Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.066157 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-n7msw"] Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.069440 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtxs2\" (UniqueName: \"kubernetes.io/projected/005f663b-9f61-44c6-8958-bbb7311df12e-kube-api-access-rtxs2\") pod \"openstack-operator-index-n7msw\" (UID: \"005f663b-9f61-44c6-8958-bbb7311df12e\") " pod="openstack-operators/openstack-operator-index-n7msw" Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.120398 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-sjmmt" podStartSLOduration=1.458207946 podStartE2EDuration="4.120367085s" podCreationTimestamp="2025-12-03 19:44:41 +0000 UTC" firstStartedPulling="2025-12-03 19:44:41.826732918 +0000 UTC m=+897.789543184" lastFinishedPulling="2025-12-03 19:44:44.488892017 +0000 UTC m=+900.451702323" observedRunningTime="2025-12-03 19:44:45.117330633 +0000 UTC m=+901.080140929" watchObservedRunningTime="2025-12-03 19:44:45.120367085 +0000 UTC m=+901.083177391" Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.170845 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtxs2\" (UniqueName: \"kubernetes.io/projected/005f663b-9f61-44c6-8958-bbb7311df12e-kube-api-access-rtxs2\") pod \"openstack-operator-index-n7msw\" (UID: \"005f663b-9f61-44c6-8958-bbb7311df12e\") " pod="openstack-operators/openstack-operator-index-n7msw" Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.209904 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtxs2\" (UniqueName: \"kubernetes.io/projected/005f663b-9f61-44c6-8958-bbb7311df12e-kube-api-access-rtxs2\") pod \"openstack-operator-index-n7msw\" (UID: \"005f663b-9f61-44c6-8958-bbb7311df12e\") " pod="openstack-operators/openstack-operator-index-n7msw" Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.374667 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-n7msw" Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.464177 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-sjmmt" Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.579264 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6skn\" (UniqueName: \"kubernetes.io/projected/054d0fac-68b1-4edc-877b-93b286a5c7f4-kube-api-access-r6skn\") pod \"054d0fac-68b1-4edc-877b-93b286a5c7f4\" (UID: \"054d0fac-68b1-4edc-877b-93b286a5c7f4\") " Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.593270 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/054d0fac-68b1-4edc-877b-93b286a5c7f4-kube-api-access-r6skn" (OuterVolumeSpecName: "kube-api-access-r6skn") pod "054d0fac-68b1-4edc-877b-93b286a5c7f4" (UID: "054d0fac-68b1-4edc-877b-93b286a5c7f4"). InnerVolumeSpecName "kube-api-access-r6skn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.659355 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-n7msw"] Dec 03 19:44:45 crc kubenswrapper[4916]: I1203 19:44:45.681262 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6skn\" (UniqueName: \"kubernetes.io/projected/054d0fac-68b1-4edc-877b-93b286a5c7f4-kube-api-access-r6skn\") on node \"crc\" DevicePath \"\"" Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.057881 4916 generic.go:334] "Generic (PLEG): container finished" podID="054d0fac-68b1-4edc-877b-93b286a5c7f4" containerID="95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb" exitCode=0 Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.057940 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sjmmt" event={"ID":"054d0fac-68b1-4edc-877b-93b286a5c7f4","Type":"ContainerDied","Data":"95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb"} Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.057960 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-sjmmt" Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.058026 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-sjmmt" event={"ID":"054d0fac-68b1-4edc-877b-93b286a5c7f4","Type":"ContainerDied","Data":"8605f6309c15daaf023b7fbb66d5585042d7b58c1608013db58be7a1140df9ff"} Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.058059 4916 scope.go:117] "RemoveContainer" containerID="95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb" Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.059275 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-n7msw" event={"ID":"005f663b-9f61-44c6-8958-bbb7311df12e","Type":"ContainerStarted","Data":"7bfaa892066a0deee4b078387a15950eda42bdc3b4c6fdef47b65a9851144928"} Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.059318 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-n7msw" event={"ID":"005f663b-9f61-44c6-8958-bbb7311df12e","Type":"ContainerStarted","Data":"6d95f0fc3b6dd5b26fa447c9e2e5608049b9ed325b9531757eb83d49765ce3dc"} Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.080398 4916 scope.go:117] "RemoveContainer" containerID="95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb" Dec 03 19:44:46 crc kubenswrapper[4916]: E1203 19:44:46.080940 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb\": container with ID starting with 95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb not found: ID does not exist" containerID="95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb" Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.080983 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb"} err="failed to get container status \"95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb\": rpc error: code = NotFound desc = could not find container \"95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb\": container with ID starting with 95b492fe135c9beddcc10ef836751ab63f56473dea327f6c6b9b239c0404affb not found: ID does not exist" Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.081689 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-n7msw" podStartSLOduration=1.026290085 podStartE2EDuration="1.081652373s" podCreationTimestamp="2025-12-03 19:44:45 +0000 UTC" firstStartedPulling="2025-12-03 19:44:45.666641839 +0000 UTC m=+901.629452115" lastFinishedPulling="2025-12-03 19:44:45.722004127 +0000 UTC m=+901.684814403" observedRunningTime="2025-12-03 19:44:46.078147549 +0000 UTC m=+902.040957905" watchObservedRunningTime="2025-12-03 19:44:46.081652373 +0000 UTC m=+902.044462689" Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.100521 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-sjmmt"] Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.104476 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-sjmmt"] Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.159408 4916 patch_prober.go:28] 
interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.159499 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.489932 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="054d0fac-68b1-4edc-877b-93b286a5c7f4" path="/var/lib/kubelet/pods/054d0fac-68b1-4edc-877b-93b286a5c7f4/volumes" Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.613631 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-42zxv" Dec 03 19:44:46 crc kubenswrapper[4916]: I1203 19:44:46.756320 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-zcqns" Dec 03 19:44:47 crc kubenswrapper[4916]: I1203 19:44:47.244902 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-qkz6f" Dec 03 19:44:55 crc kubenswrapper[4916]: I1203 19:44:55.375828 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-n7msw" Dec 03 19:44:55 crc kubenswrapper[4916]: I1203 19:44:55.376438 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-n7msw" Dec 03 19:44:55 crc kubenswrapper[4916]: I1203 19:44:55.415860 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-n7msw" Dec 03 19:44:56 crc kubenswrapper[4916]: I1203 19:44:56.166157 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-n7msw" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.211798 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969"] Dec 03 19:45:00 crc kubenswrapper[4916]: E1203 19:45:00.212416 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="054d0fac-68b1-4edc-877b-93b286a5c7f4" containerName="registry-server" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.212432 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="054d0fac-68b1-4edc-877b-93b286a5c7f4" containerName="registry-server" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.212580 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="054d0fac-68b1-4edc-877b-93b286a5c7f4" containerName="registry-server" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.213079 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.215353 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.215410 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.224762 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969"] Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.319429 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-278c4\" (UniqueName: \"kubernetes.io/projected/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-kube-api-access-278c4\") pod \"collect-profiles-29413185-v2969\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.319751 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-config-volume\") pod \"collect-profiles-29413185-v2969\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.319813 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-secret-volume\") pod \"collect-profiles-29413185-v2969\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.421400 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-278c4\" (UniqueName: \"kubernetes.io/projected/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-kube-api-access-278c4\") pod \"collect-profiles-29413185-v2969\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.421498 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-config-volume\") pod \"collect-profiles-29413185-v2969\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.421525 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-secret-volume\") pod \"collect-profiles-29413185-v2969\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.423274 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-config-volume\") pod 
\"collect-profiles-29413185-v2969\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.428983 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-secret-volume\") pod \"collect-profiles-29413185-v2969\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.448824 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-278c4\" (UniqueName: \"kubernetes.io/projected/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-kube-api-access-278c4\") pod \"collect-profiles-29413185-v2969\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.530297 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:00 crc kubenswrapper[4916]: I1203 19:45:00.941094 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969"] Dec 03 19:45:01 crc kubenswrapper[4916]: I1203 19:45:01.160625 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" event={"ID":"eea933c9-f15e-4e5e-8b14-60e7b80b32b3","Type":"ContainerStarted","Data":"0e086c2ae621389a6e27aff32921f9566a7291cbb243d1deed38c1a414508ba2"} Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.673055 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd"] Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.675330 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.681135 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-v47kj" Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.696087 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd"] Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.754691 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-util\") pod \"0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.754736 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-bundle\") pod \"0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.754757 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t9gj\" (UniqueName: \"kubernetes.io/projected/6d033d79-255d-44d4-8082-b1044f95ab2e-kube-api-access-4t9gj\") pod \"0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.856171 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-bundle\") pod \"0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.856217 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t9gj\" (UniqueName: \"kubernetes.io/projected/6d033d79-255d-44d4-8082-b1044f95ab2e-kube-api-access-4t9gj\") pod \"0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.856305 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-util\") pod \"0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.856832 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-util\") pod \"0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.857041 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-bundle\") pod \"0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.900085 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t9gj\" (UniqueName: \"kubernetes.io/projected/6d033d79-255d-44d4-8082-b1044f95ab2e-kube-api-access-4t9gj\") pod \"0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:02 crc kubenswrapper[4916]: I1203 19:45:02.998356 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:03 crc kubenswrapper[4916]: I1203 19:45:03.193952 4916 generic.go:334] "Generic (PLEG): container finished" podID="eea933c9-f15e-4e5e-8b14-60e7b80b32b3" containerID="659bc0e01e60b0673ce5df4b220eebca2920cb02b54472405a0a332e76c5d168" exitCode=0 Dec 03 19:45:03 crc kubenswrapper[4916]: I1203 19:45:03.194739 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" event={"ID":"eea933c9-f15e-4e5e-8b14-60e7b80b32b3","Type":"ContainerDied","Data":"659bc0e01e60b0673ce5df4b220eebca2920cb02b54472405a0a332e76c5d168"} Dec 03 19:45:03 crc kubenswrapper[4916]: I1203 19:45:03.480684 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd"] Dec 03 19:45:03 crc kubenswrapper[4916]: W1203 19:45:03.486660 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d033d79_255d_44d4_8082_b1044f95ab2e.slice/crio-bf668374703c1dc33706be43d027ac78d4748c07e4b4bcad6432ee36d9cadfff WatchSource:0}: Error finding container bf668374703c1dc33706be43d027ac78d4748c07e4b4bcad6432ee36d9cadfff: Status 404 returned error can't find the container with id bf668374703c1dc33706be43d027ac78d4748c07e4b4bcad6432ee36d9cadfff Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.203519 4916 generic.go:334] "Generic (PLEG): container finished" podID="6d033d79-255d-44d4-8082-b1044f95ab2e" containerID="fc32ad038bdeabce6ad3065260e2a213883eafce5a47ccbe9f82d264b5f848a7" exitCode=0 Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.203622 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" event={"ID":"6d033d79-255d-44d4-8082-b1044f95ab2e","Type":"ContainerDied","Data":"fc32ad038bdeabce6ad3065260e2a213883eafce5a47ccbe9f82d264b5f848a7"} Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.203958 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" event={"ID":"6d033d79-255d-44d4-8082-b1044f95ab2e","Type":"ContainerStarted","Data":"bf668374703c1dc33706be43d027ac78d4748c07e4b4bcad6432ee36d9cadfff"} Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.464852 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.581879 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-278c4\" (UniqueName: \"kubernetes.io/projected/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-kube-api-access-278c4\") pod \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.582407 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-secret-volume\") pod \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.582458 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-config-volume\") pod \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\" (UID: \"eea933c9-f15e-4e5e-8b14-60e7b80b32b3\") " Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.583820 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-config-volume" (OuterVolumeSpecName: "config-volume") pod "eea933c9-f15e-4e5e-8b14-60e7b80b32b3" (UID: "eea933c9-f15e-4e5e-8b14-60e7b80b32b3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.588649 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-kube-api-access-278c4" (OuterVolumeSpecName: "kube-api-access-278c4") pod "eea933c9-f15e-4e5e-8b14-60e7b80b32b3" (UID: "eea933c9-f15e-4e5e-8b14-60e7b80b32b3"). InnerVolumeSpecName "kube-api-access-278c4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.589790 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "eea933c9-f15e-4e5e-8b14-60e7b80b32b3" (UID: "eea933c9-f15e-4e5e-8b14-60e7b80b32b3"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.684010 4916 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.684067 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-278c4\" (UniqueName: \"kubernetes.io/projected/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-kube-api-access-278c4\") on node \"crc\" DevicePath \"\"" Dec 03 19:45:04 crc kubenswrapper[4916]: I1203 19:45:04.684090 4916 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eea933c9-f15e-4e5e-8b14-60e7b80b32b3-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 19:45:05 crc kubenswrapper[4916]: I1203 19:45:05.214347 4916 generic.go:334] "Generic (PLEG): container finished" podID="6d033d79-255d-44d4-8082-b1044f95ab2e" containerID="4c5c96d552f3aafe34c99b79d340577c36b9bd06054769f2208e4690871445f4" exitCode=0 Dec 03 19:45:05 crc kubenswrapper[4916]: I1203 19:45:05.214418 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" event={"ID":"6d033d79-255d-44d4-8082-b1044f95ab2e","Type":"ContainerDied","Data":"4c5c96d552f3aafe34c99b79d340577c36b9bd06054769f2208e4690871445f4"} Dec 03 19:45:05 crc kubenswrapper[4916]: I1203 19:45:05.217364 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" event={"ID":"eea933c9-f15e-4e5e-8b14-60e7b80b32b3","Type":"ContainerDied","Data":"0e086c2ae621389a6e27aff32921f9566a7291cbb243d1deed38c1a414508ba2"} Dec 03 19:45:05 crc kubenswrapper[4916]: I1203 19:45:05.217422 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e086c2ae621389a6e27aff32921f9566a7291cbb243d1deed38c1a414508ba2" Dec 03 19:45:05 crc kubenswrapper[4916]: I1203 19:45:05.217460 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969" Dec 03 19:45:06 crc kubenswrapper[4916]: I1203 19:45:06.242618 4916 generic.go:334] "Generic (PLEG): container finished" podID="6d033d79-255d-44d4-8082-b1044f95ab2e" containerID="778e31b60f15e6cb725cd20c665aef91e727990b2ed48a1476e7bf3360037678" exitCode=0 Dec 03 19:45:06 crc kubenswrapper[4916]: I1203 19:45:06.242740 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" event={"ID":"6d033d79-255d-44d4-8082-b1044f95ab2e","Type":"ContainerDied","Data":"778e31b60f15e6cb725cd20c665aef91e727990b2ed48a1476e7bf3360037678"} Dec 03 19:45:07 crc kubenswrapper[4916]: I1203 19:45:07.501825 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:07 crc kubenswrapper[4916]: I1203 19:45:07.626759 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4t9gj\" (UniqueName: \"kubernetes.io/projected/6d033d79-255d-44d4-8082-b1044f95ab2e-kube-api-access-4t9gj\") pod \"6d033d79-255d-44d4-8082-b1044f95ab2e\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " Dec 03 19:45:07 crc kubenswrapper[4916]: I1203 19:45:07.626851 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-bundle\") pod \"6d033d79-255d-44d4-8082-b1044f95ab2e\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " Dec 03 19:45:07 crc kubenswrapper[4916]: I1203 19:45:07.626896 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-util\") pod \"6d033d79-255d-44d4-8082-b1044f95ab2e\" (UID: \"6d033d79-255d-44d4-8082-b1044f95ab2e\") " Dec 03 19:45:07 crc kubenswrapper[4916]: I1203 19:45:07.627738 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-bundle" (OuterVolumeSpecName: "bundle") pod "6d033d79-255d-44d4-8082-b1044f95ab2e" (UID: "6d033d79-255d-44d4-8082-b1044f95ab2e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:45:07 crc kubenswrapper[4916]: I1203 19:45:07.633031 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d033d79-255d-44d4-8082-b1044f95ab2e-kube-api-access-4t9gj" (OuterVolumeSpecName: "kube-api-access-4t9gj") pod "6d033d79-255d-44d4-8082-b1044f95ab2e" (UID: "6d033d79-255d-44d4-8082-b1044f95ab2e"). InnerVolumeSpecName "kube-api-access-4t9gj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:45:07 crc kubenswrapper[4916]: I1203 19:45:07.645973 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-util" (OuterVolumeSpecName: "util") pod "6d033d79-255d-44d4-8082-b1044f95ab2e" (UID: "6d033d79-255d-44d4-8082-b1044f95ab2e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:45:07 crc kubenswrapper[4916]: I1203 19:45:07.728520 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4t9gj\" (UniqueName: \"kubernetes.io/projected/6d033d79-255d-44d4-8082-b1044f95ab2e-kube-api-access-4t9gj\") on node \"crc\" DevicePath \"\"" Dec 03 19:45:07 crc kubenswrapper[4916]: I1203 19:45:07.728554 4916 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:45:07 crc kubenswrapper[4916]: I1203 19:45:07.728582 4916 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6d033d79-255d-44d4-8082-b1044f95ab2e-util\") on node \"crc\" DevicePath \"\"" Dec 03 19:45:08 crc kubenswrapper[4916]: I1203 19:45:08.258339 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" event={"ID":"6d033d79-255d-44d4-8082-b1044f95ab2e","Type":"ContainerDied","Data":"bf668374703c1dc33706be43d027ac78d4748c07e4b4bcad6432ee36d9cadfff"} Dec 03 19:45:08 crc kubenswrapper[4916]: I1203 19:45:08.258606 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf668374703c1dc33706be43d027ac78d4748c07e4b4bcad6432ee36d9cadfff" Dec 03 19:45:08 crc kubenswrapper[4916]: I1203 19:45:08.258445 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd" Dec 03 19:45:14 crc kubenswrapper[4916]: I1203 19:45:14.932850 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd"] Dec 03 19:45:14 crc kubenswrapper[4916]: E1203 19:45:14.933712 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d033d79-255d-44d4-8082-b1044f95ab2e" containerName="util" Dec 03 19:45:14 crc kubenswrapper[4916]: I1203 19:45:14.933731 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d033d79-255d-44d4-8082-b1044f95ab2e" containerName="util" Dec 03 19:45:14 crc kubenswrapper[4916]: E1203 19:45:14.933746 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d033d79-255d-44d4-8082-b1044f95ab2e" containerName="pull" Dec 03 19:45:14 crc kubenswrapper[4916]: I1203 19:45:14.933757 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d033d79-255d-44d4-8082-b1044f95ab2e" containerName="pull" Dec 03 19:45:14 crc kubenswrapper[4916]: E1203 19:45:14.933778 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea933c9-f15e-4e5e-8b14-60e7b80b32b3" containerName="collect-profiles" Dec 03 19:45:14 crc kubenswrapper[4916]: I1203 19:45:14.933788 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea933c9-f15e-4e5e-8b14-60e7b80b32b3" containerName="collect-profiles" Dec 03 19:45:14 crc kubenswrapper[4916]: E1203 19:45:14.933806 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d033d79-255d-44d4-8082-b1044f95ab2e" containerName="extract" Dec 03 19:45:14 crc kubenswrapper[4916]: I1203 19:45:14.933816 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d033d79-255d-44d4-8082-b1044f95ab2e" containerName="extract" Dec 03 19:45:14 crc kubenswrapper[4916]: I1203 19:45:14.934011 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="eea933c9-f15e-4e5e-8b14-60e7b80b32b3" 
containerName="collect-profiles" Dec 03 19:45:14 crc kubenswrapper[4916]: I1203 19:45:14.934042 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d033d79-255d-44d4-8082-b1044f95ab2e" containerName="extract" Dec 03 19:45:14 crc kubenswrapper[4916]: I1203 19:45:14.934666 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd" Dec 03 19:45:14 crc kubenswrapper[4916]: I1203 19:45:14.936405 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-q88mf" Dec 03 19:45:15 crc kubenswrapper[4916]: I1203 19:45:15.004335 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd"] Dec 03 19:45:15 crc kubenswrapper[4916]: I1203 19:45:15.048491 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx729\" (UniqueName: \"kubernetes.io/projected/e623d74a-1d16-43ec-a04f-a1817a8a8294-kube-api-access-dx729\") pod \"openstack-operator-controller-operator-8547fd68fd-vc7gd\" (UID: \"e623d74a-1d16-43ec-a04f-a1817a8a8294\") " pod="openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd" Dec 03 19:45:15 crc kubenswrapper[4916]: I1203 19:45:15.149723 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx729\" (UniqueName: \"kubernetes.io/projected/e623d74a-1d16-43ec-a04f-a1817a8a8294-kube-api-access-dx729\") pod \"openstack-operator-controller-operator-8547fd68fd-vc7gd\" (UID: \"e623d74a-1d16-43ec-a04f-a1817a8a8294\") " pod="openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd" Dec 03 19:45:15 crc kubenswrapper[4916]: I1203 19:45:15.175459 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx729\" (UniqueName: \"kubernetes.io/projected/e623d74a-1d16-43ec-a04f-a1817a8a8294-kube-api-access-dx729\") pod \"openstack-operator-controller-operator-8547fd68fd-vc7gd\" (UID: \"e623d74a-1d16-43ec-a04f-a1817a8a8294\") " pod="openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd" Dec 03 19:45:15 crc kubenswrapper[4916]: I1203 19:45:15.253510 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd" Dec 03 19:45:15 crc kubenswrapper[4916]: I1203 19:45:15.492991 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd"] Dec 03 19:45:16 crc kubenswrapper[4916]: I1203 19:45:16.159067 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:45:16 crc kubenswrapper[4916]: I1203 19:45:16.159145 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:45:16 crc kubenswrapper[4916]: I1203 19:45:16.159207 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:45:16 crc kubenswrapper[4916]: I1203 19:45:16.160999 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dbc6d2dff458c9d2c91a2f82a009f88b78c61b85becef77733114e92974e9b6f"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 19:45:16 crc kubenswrapper[4916]: I1203 19:45:16.161062 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://dbc6d2dff458c9d2c91a2f82a009f88b78c61b85becef77733114e92974e9b6f" gracePeriod=600 Dec 03 19:45:16 crc kubenswrapper[4916]: I1203 19:45:16.334033 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd" event={"ID":"e623d74a-1d16-43ec-a04f-a1817a8a8294","Type":"ContainerStarted","Data":"9880e0b1c7004d728da3ac0161302d8128bb86f1fce4bf8d9aa0925f52108a14"} Dec 03 19:45:16 crc kubenswrapper[4916]: I1203 19:45:16.342064 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="dbc6d2dff458c9d2c91a2f82a009f88b78c61b85becef77733114e92974e9b6f" exitCode=0 Dec 03 19:45:16 crc kubenswrapper[4916]: I1203 19:45:16.342121 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"dbc6d2dff458c9d2c91a2f82a009f88b78c61b85becef77733114e92974e9b6f"} Dec 03 19:45:16 crc kubenswrapper[4916]: I1203 19:45:16.342184 4916 scope.go:117] "RemoveContainer" containerID="02ae566c7ff459b62724fc48986cab4ba376415af729ca4442e9a81a3e43827b" Dec 03 19:45:17 crc kubenswrapper[4916]: I1203 19:45:17.351027 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"ac2ebe3bbf276071a9bfb2a9d6c5b901691899bf5c59f5b451ee6d04eb0e197f"} Dec 
03 19:45:20 crc kubenswrapper[4916]: I1203 19:45:20.369955 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd" event={"ID":"e623d74a-1d16-43ec-a04f-a1817a8a8294","Type":"ContainerStarted","Data":"427199b5976462e3b76ba4ef41e0c2203d905976c619dbd9e62fb9875ea51585"} Dec 03 19:45:20 crc kubenswrapper[4916]: I1203 19:45:20.370455 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd" Dec 03 19:45:20 crc kubenswrapper[4916]: I1203 19:45:20.408005 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd" podStartSLOduration=2.109268457 podStartE2EDuration="6.407983216s" podCreationTimestamp="2025-12-03 19:45:14 +0000 UTC" firstStartedPulling="2025-12-03 19:45:15.506862404 +0000 UTC m=+931.469672670" lastFinishedPulling="2025-12-03 19:45:19.805577163 +0000 UTC m=+935.768387429" observedRunningTime="2025-12-03 19:45:20.40409153 +0000 UTC m=+936.366901806" watchObservedRunningTime="2025-12-03 19:45:20.407983216 +0000 UTC m=+936.370793482" Dec 03 19:45:25 crc kubenswrapper[4916]: I1203 19:45:25.258273 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-8547fd68fd-vc7gd" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.668724 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.670742 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.672640 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-x8qq9" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.680781 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.682253 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.691030 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-vgpjv" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.693722 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.695530 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.697371 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-wn2zv" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.699961 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.714184 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.724190 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.725395 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.726987 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-pzzrh" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.736787 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.745107 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.751377 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.752488 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.756826 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-g2vt6" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.765144 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.775187 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.776276 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.784030 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-v9jqj" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.784234 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-dth7w"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.786194 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.786775 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gk4h\" (UniqueName: \"kubernetes.io/projected/232cd6fc-5f1d-4398-ae7d-5c34f49843f3-kube-api-access-7gk4h\") pod \"cinder-operator-controller-manager-859b6ccc6-6pzj5\" (UID: \"232cd6fc-5f1d-4398-ae7d-5c34f49843f3\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.786809 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnlsr\" (UniqueName: \"kubernetes.io/projected/cf093783-d31b-42fc-a85f-fff6c35fdae8-kube-api-access-fnlsr\") pod \"designate-operator-controller-manager-78b4bc895b-nsfm2\" (UID: \"cf093783-d31b-42fc-a85f-fff6c35fdae8\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.786837 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkw5d\" (UniqueName: \"kubernetes.io/projected/a83c7d54-9430-456f-b83e-abed5d9030b8-kube-api-access-vkw5d\") pod \"glance-operator-controller-manager-77987cd8cd-x6tb6\" (UID: \"a83c7d54-9430-456f-b83e-abed5d9030b8\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.786855 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8ttb\" (UniqueName: \"kubernetes.io/projected/d2c7e9c7-96cd-47bd-978a-c3fd41c74089-kube-api-access-r8ttb\") pod \"barbican-operator-controller-manager-7d9dfd778-xn476\" (UID: \"d2c7e9c7-96cd-47bd-978a-c3fd41c74089\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.798602 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-dth7w"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.799170 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.799395 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-hkvfr" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.805652 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.811971 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.813040 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.816719 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-kkv5g" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.828592 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.829675 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.832550 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-sdrsd" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.833603 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.834718 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.837129 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-r6t94" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.854290 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.866801 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.872890 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.882440 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.883451 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.885126 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-nfvll" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.888476 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert\") pod \"infra-operator-controller-manager-57548d458d-dth7w\" (UID: \"a571d18b-686d-472d-9086-e192ec504db4\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.888714 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtf9x\" (UniqueName: \"kubernetes.io/projected/b63d5bf7-8901-4bce-90d9-0006ae946230-kube-api-access-xtf9x\") pod \"heat-operator-controller-manager-5f64f6f8bb-csdvv\" (UID: \"b63d5bf7-8901-4bce-90d9-0006ae946230\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.888802 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gtxs\" (UniqueName: \"kubernetes.io/projected/7303e0f2-e41d-4220-a72b-88e6b44b016c-kube-api-access-9gtxs\") pod \"manila-operator-controller-manager-7c79b5df47-sd86t\" (UID: \"7303e0f2-e41d-4220-a72b-88e6b44b016c\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.888902 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr968\" (UniqueName: \"kubernetes.io/projected/907117b8-0a09-440c-bb47-bfa09ccec80b-kube-api-access-fr968\") pod \"ironic-operator-controller-manager-6c548fd776-jltdg\" (UID: \"907117b8-0a09-440c-bb47-bfa09ccec80b\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.888997 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gk4h\" (UniqueName: \"kubernetes.io/projected/232cd6fc-5f1d-4398-ae7d-5c34f49843f3-kube-api-access-7gk4h\") pod \"cinder-operator-controller-manager-859b6ccc6-6pzj5\" (UID: \"232cd6fc-5f1d-4398-ae7d-5c34f49843f3\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.889069 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnlsr\" (UniqueName: \"kubernetes.io/projected/cf093783-d31b-42fc-a85f-fff6c35fdae8-kube-api-access-fnlsr\") pod \"designate-operator-controller-manager-78b4bc895b-nsfm2\" (UID: \"cf093783-d31b-42fc-a85f-fff6c35fdae8\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.889168 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xff7\" (UniqueName: \"kubernetes.io/projected/a571d18b-686d-472d-9086-e192ec504db4-kube-api-access-6xff7\") pod \"infra-operator-controller-manager-57548d458d-dth7w\" (UID: \"a571d18b-686d-472d-9086-e192ec504db4\") " 
pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.889239 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh7xm\" (UniqueName: \"kubernetes.io/projected/877f6f89-deed-4f06-adb1-cfa6b5254db2-kube-api-access-hh7xm\") pod \"keystone-operator-controller-manager-7765d96ddf-6x48f\" (UID: \"877f6f89-deed-4f06-adb1-cfa6b5254db2\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.889304 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dplvh\" (UniqueName: \"kubernetes.io/projected/31355b8f-c9a1-4ddf-a97f-de6d4f506a67-kube-api-access-dplvh\") pod \"horizon-operator-controller-manager-68c6d99b8f-kndn4\" (UID: \"31355b8f-c9a1-4ddf-a97f-de6d4f506a67\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.889370 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkw5d\" (UniqueName: \"kubernetes.io/projected/a83c7d54-9430-456f-b83e-abed5d9030b8-kube-api-access-vkw5d\") pod \"glance-operator-controller-manager-77987cd8cd-x6tb6\" (UID: \"a83c7d54-9430-456f-b83e-abed5d9030b8\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.889443 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8ttb\" (UniqueName: \"kubernetes.io/projected/d2c7e9c7-96cd-47bd-978a-c3fd41c74089-kube-api-access-r8ttb\") pod \"barbican-operator-controller-manager-7d9dfd778-xn476\" (UID: \"d2c7e9c7-96cd-47bd-978a-c3fd41c74089\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.899735 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.928453 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.937053 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkw5d\" (UniqueName: \"kubernetes.io/projected/a83c7d54-9430-456f-b83e-abed5d9030b8-kube-api-access-vkw5d\") pod \"glance-operator-controller-manager-77987cd8cd-x6tb6\" (UID: \"a83c7d54-9430-456f-b83e-abed5d9030b8\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.941964 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8ttb\" (UniqueName: \"kubernetes.io/projected/d2c7e9c7-96cd-47bd-978a-c3fd41c74089-kube-api-access-r8ttb\") pod \"barbican-operator-controller-manager-7d9dfd778-xn476\" (UID: \"d2c7e9c7-96cd-47bd-978a-c3fd41c74089\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.947915 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.952031 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnlsr\" (UniqueName: \"kubernetes.io/projected/cf093783-d31b-42fc-a85f-fff6c35fdae8-kube-api-access-fnlsr\") pod \"designate-operator-controller-manager-78b4bc895b-nsfm2\" (UID: \"cf093783-d31b-42fc-a85f-fff6c35fdae8\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.959427 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.971763 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gk4h\" (UniqueName: \"kubernetes.io/projected/232cd6fc-5f1d-4398-ae7d-5c34f49843f3-kube-api-access-7gk4h\") pod \"cinder-operator-controller-manager-859b6ccc6-6pzj5\" (UID: \"232cd6fc-5f1d-4398-ae7d-5c34f49843f3\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.975308 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm"] Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.976349 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.985229 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-l6xmn" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.985346 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-zmlz5" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.990219 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh7xm\" (UniqueName: \"kubernetes.io/projected/877f6f89-deed-4f06-adb1-cfa6b5254db2-kube-api-access-hh7xm\") pod \"keystone-operator-controller-manager-7765d96ddf-6x48f\" (UID: \"877f6f89-deed-4f06-adb1-cfa6b5254db2\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.990268 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dplvh\" (UniqueName: \"kubernetes.io/projected/31355b8f-c9a1-4ddf-a97f-de6d4f506a67-kube-api-access-dplvh\") pod \"horizon-operator-controller-manager-68c6d99b8f-kndn4\" (UID: \"31355b8f-c9a1-4ddf-a97f-de6d4f506a67\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.990300 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert\") pod \"infra-operator-controller-manager-57548d458d-dth7w\" (UID: \"a571d18b-686d-472d-9086-e192ec504db4\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.990334 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-fdjfl\" (UniqueName: \"kubernetes.io/projected/dc81071a-3da1-4e63-b733-13e39ecfb823-kube-api-access-fdjfl\") pod \"mariadb-operator-controller-manager-56bbcc9d85-7q6dq\" (UID: \"dc81071a-3da1-4e63-b733-13e39ecfb823\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.990370 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtf9x\" (UniqueName: \"kubernetes.io/projected/b63d5bf7-8901-4bce-90d9-0006ae946230-kube-api-access-xtf9x\") pod \"heat-operator-controller-manager-5f64f6f8bb-csdvv\" (UID: \"b63d5bf7-8901-4bce-90d9-0006ae946230\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.990390 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gtxs\" (UniqueName: \"kubernetes.io/projected/7303e0f2-e41d-4220-a72b-88e6b44b016c-kube-api-access-9gtxs\") pod \"manila-operator-controller-manager-7c79b5df47-sd86t\" (UID: \"7303e0f2-e41d-4220-a72b-88e6b44b016c\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.990417 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr968\" (UniqueName: \"kubernetes.io/projected/907117b8-0a09-440c-bb47-bfa09ccec80b-kube-api-access-fr968\") pod \"ironic-operator-controller-manager-6c548fd776-jltdg\" (UID: \"907117b8-0a09-440c-bb47-bfa09ccec80b\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.990448 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbjmz\" (UniqueName: \"kubernetes.io/projected/251c39c1-e63c-4772-a0e6-88528867a64d-kube-api-access-cbjmz\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-mllvg\" (UID: \"251c39c1-e63c-4772-a0e6-88528867a64d\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" Dec 03 19:46:03 crc kubenswrapper[4916]: I1203 19:46:03.990473 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xff7\" (UniqueName: \"kubernetes.io/projected/a571d18b-686d-472d-9086-e192ec504db4-kube-api-access-6xff7\") pod \"infra-operator-controller-manager-57548d458d-dth7w\" (UID: \"a571d18b-686d-472d-9086-e192ec504db4\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:03 crc kubenswrapper[4916]: E1203 19:46:03.991039 4916 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 19:46:03 crc kubenswrapper[4916]: E1203 19:46:03.991099 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert podName:a571d18b-686d-472d-9086-e192ec504db4 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:04.491079276 +0000 UTC m=+980.453889542 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert") pod "infra-operator-controller-manager-57548d458d-dth7w" (UID: "a571d18b-686d-472d-9086-e192ec504db4") : secret "infra-operator-webhook-server-cert" not found Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.003167 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.009847 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.011200 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.014656 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.023336 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.026453 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtf9x\" (UniqueName: \"kubernetes.io/projected/b63d5bf7-8901-4bce-90d9-0006ae946230-kube-api-access-xtf9x\") pod \"heat-operator-controller-manager-5f64f6f8bb-csdvv\" (UID: \"b63d5bf7-8901-4bce-90d9-0006ae946230\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.027958 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr968\" (UniqueName: \"kubernetes.io/projected/907117b8-0a09-440c-bb47-bfa09ccec80b-kube-api-access-fr968\") pod \"ironic-operator-controller-manager-6c548fd776-jltdg\" (UID: \"907117b8-0a09-440c-bb47-bfa09ccec80b\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.036827 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-t4f69" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.048527 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.051228 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.052967 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dplvh\" (UniqueName: \"kubernetes.io/projected/31355b8f-c9a1-4ddf-a97f-de6d4f506a67-kube-api-access-dplvh\") pod \"horizon-operator-controller-manager-68c6d99b8f-kndn4\" (UID: \"31355b8f-c9a1-4ddf-a97f-de6d4f506a67\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.062550 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xff7\" (UniqueName: \"kubernetes.io/projected/a571d18b-686d-472d-9086-e192ec504db4-kube-api-access-6xff7\") pod \"infra-operator-controller-manager-57548d458d-dth7w\" (UID: \"a571d18b-686d-472d-9086-e192ec504db4\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.062814 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.069914 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.071024 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.074823 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh7xm\" (UniqueName: \"kubernetes.io/projected/877f6f89-deed-4f06-adb1-cfa6b5254db2-kube-api-access-hh7xm\") pod \"keystone-operator-controller-manager-7765d96ddf-6x48f\" (UID: \"877f6f89-deed-4f06-adb1-cfa6b5254db2\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.076654 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gtxs\" (UniqueName: \"kubernetes.io/projected/7303e0f2-e41d-4220-a72b-88e6b44b016c-kube-api-access-9gtxs\") pod \"manila-operator-controller-manager-7c79b5df47-sd86t\" (UID: \"7303e0f2-e41d-4220-a72b-88e6b44b016c\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.078728 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.079514 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-rgjs7" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.081088 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.085405 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.086460 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.088943 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-8z946" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.093967 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdjfl\" (UniqueName: \"kubernetes.io/projected/dc81071a-3da1-4e63-b733-13e39ecfb823-kube-api-access-fdjfl\") pod \"mariadb-operator-controller-manager-56bbcc9d85-7q6dq\" (UID: \"dc81071a-3da1-4e63-b733-13e39ecfb823\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.094033 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vpqh\" (UniqueName: \"kubernetes.io/projected/fc0e679d-4033-4479-ba7a-cdc160e0b6ad-kube-api-access-7vpqh\") pod \"octavia-operator-controller-manager-998648c74-wp7lm\" (UID: \"fc0e679d-4033-4479-ba7a-cdc160e0b6ad\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.094118 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwc4r\" (UniqueName: \"kubernetes.io/projected/462898ce-79ab-4cd6-b05e-e19b65c80fa1-kube-api-access-rwc4r\") pod \"nova-operator-controller-manager-697bc559fc-29mr4\" (UID: \"462898ce-79ab-4cd6-b05e-e19b65c80fa1\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.094141 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbjmz\" (UniqueName: \"kubernetes.io/projected/251c39c1-e63c-4772-a0e6-88528867a64d-kube-api-access-cbjmz\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-mllvg\" (UID: \"251c39c1-e63c-4772-a0e6-88528867a64d\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.105857 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.107934 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.146196 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbjmz\" (UniqueName: \"kubernetes.io/projected/251c39c1-e63c-4772-a0e6-88528867a64d-kube-api-access-cbjmz\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-mllvg\" (UID: \"251c39c1-e63c-4772-a0e6-88528867a64d\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.147370 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdjfl\" (UniqueName: \"kubernetes.io/projected/dc81071a-3da1-4e63-b733-13e39ecfb823-kube-api-access-fdjfl\") pod \"mariadb-operator-controller-manager-56bbcc9d85-7q6dq\" (UID: \"dc81071a-3da1-4e63-b733-13e39ecfb823\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.149010 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.149344 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.163131 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.169836 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.170972 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.173811 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-k79n4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.173965 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.180957 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.182041 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.188628 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.194860 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.196497 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwc4r\" (UniqueName: \"kubernetes.io/projected/462898ce-79ab-4cd6-b05e-e19b65c80fa1-kube-api-access-rwc4r\") pod \"nova-operator-controller-manager-697bc559fc-29mr4\" (UID: \"462898ce-79ab-4cd6-b05e-e19b65c80fa1\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.196576 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qq87f\" (UniqueName: \"kubernetes.io/projected/c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae-kube-api-access-qq87f\") pod \"ovn-operator-controller-manager-b6456fdb6-cjms4\" (UID: \"c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.196602 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vvtf\" (UniqueName: \"kubernetes.io/projected/f356aae7-fed8-4f1b-a863-d7b47bcda904-kube-api-access-5vvtf\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.196626 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.196677 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vpqh\" (UniqueName: \"kubernetes.io/projected/fc0e679d-4033-4479-ba7a-cdc160e0b6ad-kube-api-access-7vpqh\") pod \"octavia-operator-controller-manager-998648c74-wp7lm\" (UID: \"fc0e679d-4033-4479-ba7a-cdc160e0b6ad\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.201009 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-5967t" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.209800 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.211531 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.212499 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.215084 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-8kh5r" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.216369 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwc4r\" (UniqueName: \"kubernetes.io/projected/462898ce-79ab-4cd6-b05e-e19b65c80fa1-kube-api-access-rwc4r\") pod \"nova-operator-controller-manager-697bc559fc-29mr4\" (UID: \"462898ce-79ab-4cd6-b05e-e19b65c80fa1\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.216659 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.226699 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-2txxc"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.227820 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.238302 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-hnvhd" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.249065 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-2txxc"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.267207 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vpqh\" (UniqueName: \"kubernetes.io/projected/fc0e679d-4033-4479-ba7a-cdc160e0b6ad-kube-api-access-7vpqh\") pod \"octavia-operator-controller-manager-998648c74-wp7lm\" (UID: \"fc0e679d-4033-4479-ba7a-cdc160e0b6ad\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.299074 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.300100 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.300371 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r4gx\" (UniqueName: \"kubernetes.io/projected/8829329b-8de5-4a0d-bd48-9cb7338c2dd1-kube-api-access-4r4gx\") pod \"placement-operator-controller-manager-78f8948974-xvp6h\" (UID: \"8829329b-8de5-4a0d-bd48-9cb7338c2dd1\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.300425 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmg57\" (UniqueName: \"kubernetes.io/projected/04ce9fc2-2134-4d10-b3d8-764bca295eed-kube-api-access-bmg57\") pod \"swift-operator-controller-manager-5f8c65bbfc-7bs98\" (UID: \"04ce9fc2-2134-4d10-b3d8-764bca295eed\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.300462 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgh9n\" (UniqueName: \"kubernetes.io/projected/a5bc0003-390d-477e-8b21-f7fda61cb051-kube-api-access-hgh9n\") pod \"test-operator-controller-manager-5854674fcc-2txxc\" (UID: \"a5bc0003-390d-477e-8b21-f7fda61cb051\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.300485 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qq87f\" (UniqueName: \"kubernetes.io/projected/c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae-kube-api-access-qq87f\") pod \"ovn-operator-controller-manager-b6456fdb6-cjms4\" (UID: \"c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.300507 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vvtf\" (UniqueName: \"kubernetes.io/projected/f356aae7-fed8-4f1b-a863-d7b47bcda904-kube-api-access-5vvtf\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.300535 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.302780 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2w6mm\" (UniqueName: \"kubernetes.io/projected/6f4635b6-2410-4d5f-a7c9-3cf0a04739f7-kube-api-access-2w6mm\") pod \"telemetry-operator-controller-manager-65b6f7cdd5-fbb7s\" (UID: \"6f4635b6-2410-4d5f-a7c9-3cf0a04739f7\") " pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" Dec 03 19:46:04 crc kubenswrapper[4916]: E1203 19:46:04.303705 4916 secret.go:188] Couldn't get secret 
openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:04 crc kubenswrapper[4916]: E1203 19:46:04.303793 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert podName:f356aae7-fed8-4f1b-a863-d7b47bcda904 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:04.803771906 +0000 UTC m=+980.766582172 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" (UID: "f356aae7-fed8-4f1b-a863-d7b47bcda904") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.311504 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.322102 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-vlh4p" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.326214 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vvtf\" (UniqueName: \"kubernetes.io/projected/f356aae7-fed8-4f1b-a863-d7b47bcda904-kube-api-access-5vvtf\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.356936 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qq87f\" (UniqueName: \"kubernetes.io/projected/c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae-kube-api-access-qq87f\") pod \"ovn-operator-controller-manager-b6456fdb6-cjms4\" (UID: \"c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.395206 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.400898 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.401523 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.404824 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9644k\" (UniqueName: \"kubernetes.io/projected/463726a8-9ad4-486b-b5b8-166fed3a6190-kube-api-access-9644k\") pod \"watcher-operator-controller-manager-769dc69bc-7vz8k\" (UID: \"463726a8-9ad4-486b-b5b8-166fed3a6190\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.404883 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r4gx\" (UniqueName: \"kubernetes.io/projected/8829329b-8de5-4a0d-bd48-9cb7338c2dd1-kube-api-access-4r4gx\") pod \"placement-operator-controller-manager-78f8948974-xvp6h\" (UID: \"8829329b-8de5-4a0d-bd48-9cb7338c2dd1\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.404908 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmg57\" (UniqueName: \"kubernetes.io/projected/04ce9fc2-2134-4d10-b3d8-764bca295eed-kube-api-access-bmg57\") pod \"swift-operator-controller-manager-5f8c65bbfc-7bs98\" (UID: \"04ce9fc2-2134-4d10-b3d8-764bca295eed\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.404934 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgh9n\" (UniqueName: \"kubernetes.io/projected/a5bc0003-390d-477e-8b21-f7fda61cb051-kube-api-access-hgh9n\") pod \"test-operator-controller-manager-5854674fcc-2txxc\" (UID: \"a5bc0003-390d-477e-8b21-f7fda61cb051\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.405006 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2w6mm\" (UniqueName: \"kubernetes.io/projected/6f4635b6-2410-4d5f-a7c9-3cf0a04739f7-kube-api-access-2w6mm\") pod \"telemetry-operator-controller-manager-65b6f7cdd5-fbb7s\" (UID: \"6f4635b6-2410-4d5f-a7c9-3cf0a04739f7\") " pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.424975 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmg57\" (UniqueName: \"kubernetes.io/projected/04ce9fc2-2134-4d10-b3d8-764bca295eed-kube-api-access-bmg57\") pod \"swift-operator-controller-manager-5f8c65bbfc-7bs98\" (UID: \"04ce9fc2-2134-4d10-b3d8-764bca295eed\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.426281 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2w6mm\" (UniqueName: \"kubernetes.io/projected/6f4635b6-2410-4d5f-a7c9-3cf0a04739f7-kube-api-access-2w6mm\") pod \"telemetry-operator-controller-manager-65b6f7cdd5-fbb7s\" (UID: \"6f4635b6-2410-4d5f-a7c9-3cf0a04739f7\") " pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.429662 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r4gx\" (UniqueName: 
\"kubernetes.io/projected/8829329b-8de5-4a0d-bd48-9cb7338c2dd1-kube-api-access-4r4gx\") pod \"placement-operator-controller-manager-78f8948974-xvp6h\" (UID: \"8829329b-8de5-4a0d-bd48-9cb7338c2dd1\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.442590 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgh9n\" (UniqueName: \"kubernetes.io/projected/a5bc0003-390d-477e-8b21-f7fda61cb051-kube-api-access-hgh9n\") pod \"test-operator-controller-manager-5854674fcc-2txxc\" (UID: \"a5bc0003-390d-477e-8b21-f7fda61cb051\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.478720 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.512868 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert\") pod \"infra-operator-controller-manager-57548d458d-dth7w\" (UID: \"a571d18b-686d-472d-9086-e192ec504db4\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.512994 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9644k\" (UniqueName: \"kubernetes.io/projected/463726a8-9ad4-486b-b5b8-166fed3a6190-kube-api-access-9644k\") pod \"watcher-operator-controller-manager-769dc69bc-7vz8k\" (UID: \"463726a8-9ad4-486b-b5b8-166fed3a6190\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" Dec 03 19:46:04 crc kubenswrapper[4916]: E1203 19:46:04.513467 4916 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 19:46:04 crc kubenswrapper[4916]: E1203 19:46:04.513652 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert podName:a571d18b-686d-472d-9086-e192ec504db4 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:05.513624975 +0000 UTC m=+981.476435241 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert") pod "infra-operator-controller-manager-57548d458d-dth7w" (UID: "a571d18b-686d-472d-9086-e192ec504db4") : secret "infra-operator-webhook-server-cert" not found Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.515609 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.544731 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9644k\" (UniqueName: \"kubernetes.io/projected/463726a8-9ad4-486b-b5b8-166fed3a6190-kube-api-access-9644k\") pod \"watcher-operator-controller-manager-769dc69bc-7vz8k\" (UID: \"463726a8-9ad4-486b-b5b8-166fed3a6190\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.555688 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.561024 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.561862 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.561878 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.562354 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.562400 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.563010 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.565462 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-xpqmh" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.567794 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.567918 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.568013 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-d9rzs" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.614787 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.616514 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.616641 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.616679 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpvrl\" (UniqueName: \"kubernetes.io/projected/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-kube-api-access-gpvrl\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.616713 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6msx\" (UniqueName: \"kubernetes.io/projected/1a7e3254-35ac-48fa-8ab7-11e85c780369-kube-api-access-n6msx\") pod \"rabbitmq-cluster-operator-manager-668c99d594-m5s6k\" (UID: \"1a7e3254-35ac-48fa-8ab7-11e85c780369\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.630899 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.652190 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.717510 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.717549 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.717589 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpvrl\" (UniqueName: \"kubernetes.io/projected/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-kube-api-access-gpvrl\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.717618 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6msx\" (UniqueName: \"kubernetes.io/projected/1a7e3254-35ac-48fa-8ab7-11e85c780369-kube-api-access-n6msx\") pod \"rabbitmq-cluster-operator-manager-668c99d594-m5s6k\" (UID: \"1a7e3254-35ac-48fa-8ab7-11e85c780369\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k" Dec 03 19:46:04 crc kubenswrapper[4916]: E1203 19:46:04.717752 4916 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 19:46:04 crc kubenswrapper[4916]: E1203 19:46:04.717796 4916 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 19:46:04 crc kubenswrapper[4916]: E1203 19:46:04.717874 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:05.217849615 +0000 UTC m=+981.180659881 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "webhook-server-cert" not found Dec 03 19:46:04 crc kubenswrapper[4916]: E1203 19:46:04.717935 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:05.217903656 +0000 UTC m=+981.180713992 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "metrics-server-cert" not found Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.736363 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpvrl\" (UniqueName: \"kubernetes.io/projected/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-kube-api-access-gpvrl\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.739146 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6msx\" (UniqueName: \"kubernetes.io/projected/1a7e3254-35ac-48fa-8ab7-11e85c780369-kube-api-access-n6msx\") pod \"rabbitmq-cluster-operator-manager-668c99d594-m5s6k\" (UID: \"1a7e3254-35ac-48fa-8ab7-11e85c780369\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.747242 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.818812 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" Dec 03 19:46:04 crc kubenswrapper[4916]: E1203 19:46:04.818998 4916 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:04 crc kubenswrapper[4916]: E1203 19:46:04.819444 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert podName:f356aae7-fed8-4f1b-a863-d7b47bcda904 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:05.81941722 +0000 UTC m=+981.782227486 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" (UID: "f356aae7-fed8-4f1b-a863-d7b47bcda904") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.879964 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.904464 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2"] Dec 03 19:46:04 crc kubenswrapper[4916]: W1203 19:46:04.931377 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf093783_d31b_42fc_a85f_fff6c35fdae8.slice/crio-6993c2679664c3c8b3c07ea1cebaeece458cb28a8c5c3bf69b0bacba4973a04d WatchSource:0}: Error finding container 6993c2679664c3c8b3c07ea1cebaeece458cb28a8c5c3bf69b0bacba4973a04d: Status 404 returned error can't find the container with id 6993c2679664c3c8b3c07ea1cebaeece458cb28a8c5c3bf69b0bacba4973a04d Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.932314 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k" Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.979880 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5"] Dec 03 19:46:04 crc kubenswrapper[4916]: I1203 19:46:04.995170 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4"] Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.138693 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv"] Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.145139 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f"] Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.150473 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg"] Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.224438 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.225022 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.224743 4916 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 19:46:05 crc 
kubenswrapper[4916]: E1203 19:46:05.229980 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:06.229951666 +0000 UTC m=+982.192761932 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "metrics-server-cert" not found Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.225100 4916 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.230517 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:06.230505951 +0000 UTC m=+982.193316217 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "webhook-server-cert" not found Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.537432 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert\") pod \"infra-operator-controller-manager-57548d458d-dth7w\" (UID: \"a571d18b-686d-472d-9086-e192ec504db4\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.537637 4916 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.537691 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert podName:a571d18b-686d-472d-9086-e192ec504db4 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:07.537673112 +0000 UTC m=+983.500483378 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert") pod "infra-operator-controller-manager-57548d458d-dth7w" (UID: "a571d18b-686d-472d-9086-e192ec504db4") : secret "infra-operator-webhook-server-cert" not found Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.554306 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h"] Dec 03 19:46:05 crc kubenswrapper[4916]: W1203 19:46:05.562970 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8829329b_8de5_4a0d_bd48_9cb7338c2dd1.slice/crio-400b594fc2757b1561753cb8a09a64d3a1244bbc59b8006eb3e42cb6cd03d154 WatchSource:0}: Error finding container 400b594fc2757b1561753cb8a09a64d3a1244bbc59b8006eb3e42cb6cd03d154: Status 404 returned error can't find the container with id 400b594fc2757b1561753cb8a09a64d3a1244bbc59b8006eb3e42cb6cd03d154 Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.585308 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq"] Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.588742 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg"] Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.603648 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4"] Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.608878 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm"] Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.613887 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s"] Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.620228 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t"] Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.626276 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k"] Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.632761 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-2txxc"] Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.635202 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.51:5001/openstack-k8s-operators/telemetry-operator:d07f0c040df35512abbef117adfe7592815c4ffb,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2w6mm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_openstack-operators(6f4635b6-2410-4d5f-a7c9-3cf0a04739f7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.636231 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4"] Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.637729 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2w6mm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_openstack-operators(6f4635b6-2410-4d5f-a7c9-3cf0a04739f7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" 
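The repeated `ErrImagePull: "pull QPS exceeded"` entries in this window are not registry-side throttling: the log message comes from the kubelet's own client-side image-pull rate limiter, which rejects pulls outright once the configured budget is spent (KubeletConfiguration `registryPullQPS`, default 5, and `registryBurst`, default 10). With dozens of operator images requested in the same sync window, the burst is exhausted almost immediately and the remaining pods fall into `ImagePullBackOff`, as the later entries show. Below is a minimal token-bucket sketch of that behavior; the limiter is assumed to work like a standard token bucket sized by those two settings, and the class and numbers are illustrative, not kubelet code:

```python
import time

# Illustrative stand-in for the kubelet's client-side pull limiter
# (assumed token bucket sized by registryPullQPS=5 / registryBurst=10).
class PullLimiter:
    def __init__(self, qps: float = 5.0, burst: int = 10):
        self.rate = qps               # tokens refilled per second
        self.capacity = float(burst)  # bucket size = allowed burst
        self.tokens = float(burst)
        self.last = time.monotonic()

    def try_acquire(self) -> bool:
        """True if a pull may start now; False maps to 'pull QPS exceeded'."""
        now = time.monotonic()
        self.tokens = min(self.capacity, self.tokens + (now - self.last) * self.rate)
        self.last = now
        if self.tokens >= 1.0:
            self.tokens -= 1.0
            return True
        return False

limiter = PullLimiter()
# Roughly the situation above: ~30 operator images requested at once.
results = [limiter.try_acquire() for _ in range(30)]
print(results.count(True), "admitted,", results.count(False), "rejected")  # ~10 / ~20
```

The rejections are transient by design: each affected pod retries under backoff, so on a CI node the usual mitigations are pre-pulling the operator images or raising `registryPullQPS`/`registryBurst` in the kubelet configuration.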
Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.638815 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" podUID="6f4635b6-2410-4d5f-a7c9-3cf0a04739f7" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.639145 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9644k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-7vz8k_openstack-operators(463726a8-9ad4-486b-b5b8-166fed3a6190): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.642492 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98"] Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.643662 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9644k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-7vz8k_openstack-operators(463726a8-9ad4-486b-b5b8-166fed3a6190): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.644785 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" podUID="463726a8-9ad4-486b-b5b8-166fed3a6190" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.645384 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7vpqh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-wp7lm_openstack-operators(fc0e679d-4033-4479-ba7a-cdc160e0b6ad): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.648334 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7vpqh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-wp7lm_openstack-operators(fc0e679d-4033-4479-ba7a-cdc160e0b6ad): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.648509 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 
-3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rwc4r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-29mr4_openstack-operators(462898ce-79ab-4cd6-b05e-e19b65c80fa1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.649591 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" podUID="fc0e679d-4033-4479-ba7a-cdc160e0b6ad" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.650154 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rwc4r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-29mr4_openstack-operators(462898ce-79ab-4cd6-b05e-e19b65c80fa1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.650518 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k"] Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.651314 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" podUID="462898ce-79ab-4cd6-b05e-e19b65c80fa1" Dec 03 19:46:05 crc kubenswrapper[4916]: W1203 19:46:05.662082 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5bc0003_390d_477e_8b21_f7fda61cb051.slice/crio-964dad4fb9e71dadb158486433ebbb7c94fa4163fec275469a80123b49cfa1cc WatchSource:0}: Error finding container 964dad4fb9e71dadb158486433ebbb7c94fa4163fec275469a80123b49cfa1cc: Status 404 returned error can't find the container with id 964dad4fb9e71dadb158486433ebbb7c94fa4163fec275469a80123b49cfa1cc Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.662266 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cbjmz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-mllvg_openstack-operators(251c39c1-e63c-4772-a0e6-88528867a64d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.664424 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cbjmz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-mllvg_openstack-operators(251c39c1-e63c-4772-a0e6-88528867a64d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.665515 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" podUID="251c39c1-e63c-4772-a0e6-88528867a64d" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.673691 4916 kuberuntime_manager.go:1274] "Unhandled Error" 
err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hgh9n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-2txxc_openstack-operators(a5bc0003-390d-477e-8b21-f7fda61cb051): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.676500 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hgh9n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-2txxc_openstack-operators(a5bc0003-390d-477e-8b21-f7fda61cb051): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.677629 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" podUID="a5bc0003-390d-477e-8b21-f7fda61cb051" Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.750389 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f" event={"ID":"877f6f89-deed-4f06-adb1-cfa6b5254db2","Type":"ContainerStarted","Data":"d5dc08638471f47c8ab72ed3e1c589538f1cb8204b78fd5e292f87e4e11afb32"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.755169 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h" event={"ID":"8829329b-8de5-4a0d-bd48-9cb7338c2dd1","Type":"ContainerStarted","Data":"400b594fc2757b1561753cb8a09a64d3a1244bbc59b8006eb3e42cb6cd03d154"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.757029 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k" event={"ID":"1a7e3254-35ac-48fa-8ab7-11e85c780369","Type":"ContainerStarted","Data":"1ccc1a6f7bff55d1667004b8b22b193d8abe8e3d97933f51d803bf86ebc45577"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.759538 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6" event={"ID":"a83c7d54-9430-456f-b83e-abed5d9030b8","Type":"ContainerStarted","Data":"f0146f607a8773d9a6b3beaad1bb6097f1ae5ace1f1feb0d7aaf02f727da96f7"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.764304 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2" event={"ID":"cf093783-d31b-42fc-a85f-fff6c35fdae8","Type":"ContainerStarted","Data":"6993c2679664c3c8b3c07ea1cebaeece458cb28a8c5c3bf69b0bacba4973a04d"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.766370 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq" 
event={"ID":"dc81071a-3da1-4e63-b733-13e39ecfb823","Type":"ContainerStarted","Data":"fe8fc824a239eb286987541e74be93a562981139b75f78debe847d1e4a80c296"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.767639 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" event={"ID":"251c39c1-e63c-4772-a0e6-88528867a64d","Type":"ContainerStarted","Data":"d8267d9fd24b38513cfdde261f116c893076f74179c9c041eac9d0dc57b9cc3a"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.776784 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4" event={"ID":"c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae","Type":"ContainerStarted","Data":"0fa48d843ab28652d4631d4013e039b486c9a1870c06ef71d06cd9bd01a2acaa"} Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.777987 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" podUID="251c39c1-e63c-4772-a0e6-88528867a64d" Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.778337 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t" event={"ID":"7303e0f2-e41d-4220-a72b-88e6b44b016c","Type":"ContainerStarted","Data":"352109d325c6906876d3ea98b7ad1f7d85d745882a8e9cc73d5f06cdececc9ad"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.790061 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" event={"ID":"463726a8-9ad4-486b-b5b8-166fed3a6190","Type":"ContainerStarted","Data":"42b6bb2d8892ebcc1c8bfd33e6abe2e052c494927ec12d444ead7c061a18429f"} Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.800012 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" podUID="463726a8-9ad4-486b-b5b8-166fed3a6190" Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.804857 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4" event={"ID":"31355b8f-c9a1-4ddf-a97f-de6d4f506a67","Type":"ContainerStarted","Data":"9419a7481b16692f1d3c0a7a4f89fe86ca2645d657c2134e1c264298df7efb0c"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.814035 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" event={"ID":"d2c7e9c7-96cd-47bd-978a-c3fd41c74089","Type":"ContainerStarted","Data":"98e7616eb82fd9d9e3424bf35aecec45aa01be4c1c77db78cd1ab0aa6bf100ca"} Dec 03 19:46:05 crc 
kubenswrapper[4916]: I1203 19:46:05.816741 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg" event={"ID":"907117b8-0a09-440c-bb47-bfa09ccec80b","Type":"ContainerStarted","Data":"75181a326b26db352fe315b17712af62ef24ef77f93a7542bcb22feeb0aad817"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.818543 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98" event={"ID":"04ce9fc2-2134-4d10-b3d8-764bca295eed","Type":"ContainerStarted","Data":"7aa84b9d84c1e6a35ab907fbead91f034f4e90cff257d7ff083b89ec02894439"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.819776 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" event={"ID":"462898ce-79ab-4cd6-b05e-e19b65c80fa1","Type":"ContainerStarted","Data":"3d95a11f9d5f6ae51e9c549a192878ca9da6a2f65ddb2e51593a0ccf6df35b98"} Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.824661 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" podUID="462898ce-79ab-4cd6-b05e-e19b65c80fa1" Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.825035 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5" event={"ID":"232cd6fc-5f1d-4398-ae7d-5c34f49843f3","Type":"ContainerStarted","Data":"e78018cd7f851cfc0594bca557ed13a5fe06323ed0a5c3000764c3e7619266f7"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.829146 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" event={"ID":"6f4635b6-2410-4d5f-a7c9-3cf0a04739f7","Type":"ContainerStarted","Data":"8ace5a1920e93eff279e976ab668886ac51a801a2a0aa9ab2fcf420114432bb6"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.830935 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" event={"ID":"a5bc0003-390d-477e-8b21-f7fda61cb051","Type":"ContainerStarted","Data":"964dad4fb9e71dadb158486433ebbb7c94fa4163fec275469a80123b49cfa1cc"} Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.832432 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.51:5001/openstack-k8s-operators/telemetry-operator:d07f0c040df35512abbef117adfe7592815c4ffb\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" podUID="6f4635b6-2410-4d5f-a7c9-3cf0a04739f7" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.844764 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" podUID="a5bc0003-390d-477e-8b21-f7fda61cb051" Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.845763 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv" event={"ID":"b63d5bf7-8901-4bce-90d9-0006ae946230","Type":"ContainerStarted","Data":"7a8cdad584d7a0fb9c7d794285913d5c6e26528b1b658fd99e59fca09ca59842"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.847206 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" event={"ID":"fc0e679d-4033-4479-ba7a-cdc160e0b6ad","Type":"ContainerStarted","Data":"48962108045a6cf654cd609df7c394d6e2333e6f037d3ed06da9da0c75a72db0"} Dec 03 19:46:05 crc kubenswrapper[4916]: I1203 19:46:05.852282 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.852721 4916 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.852852 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert podName:f356aae7-fed8-4f1b-a863-d7b47bcda904 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:07.852811837 +0000 UTC m=+983.815622103 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" (UID: "f356aae7-fed8-4f1b-a863-d7b47bcda904") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:05 crc kubenswrapper[4916]: E1203 19:46:05.854110 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" podUID="fc0e679d-4033-4479-ba7a-cdc160e0b6ad" Dec 03 19:46:06 crc kubenswrapper[4916]: I1203 19:46:06.259126 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:06 crc kubenswrapper[4916]: I1203 19:46:06.259392 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:06 crc kubenswrapper[4916]: E1203 19:46:06.259353 4916 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 19:46:06 crc kubenswrapper[4916]: E1203 19:46:06.259519 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:08.259499339 +0000 UTC m=+984.222309605 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "metrics-server-cert" not found Dec 03 19:46:06 crc kubenswrapper[4916]: E1203 19:46:06.259442 4916 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 19:46:06 crc kubenswrapper[4916]: E1203 19:46:06.259622 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:08.259594742 +0000 UTC m=+984.222405088 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "webhook-server-cert" not found Dec 03 19:46:06 crc kubenswrapper[4916]: E1203 19:46:06.860461 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" podUID="463726a8-9ad4-486b-b5b8-166fed3a6190" Dec 03 19:46:06 crc kubenswrapper[4916]: E1203 19:46:06.865222 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.51:5001/openstack-k8s-operators/telemetry-operator:d07f0c040df35512abbef117adfe7592815c4ffb\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" podUID="6f4635b6-2410-4d5f-a7c9-3cf0a04739f7" Dec 03 19:46:06 crc kubenswrapper[4916]: E1203 19:46:06.865369 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" podUID="251c39c1-e63c-4772-a0e6-88528867a64d" Dec 03 19:46:06 crc kubenswrapper[4916]: E1203 19:46:06.865490 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" podUID="462898ce-79ab-4cd6-b05e-e19b65c80fa1" Dec 03 19:46:06 crc kubenswrapper[4916]: E1203 19:46:06.865619 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" podUID="fc0e679d-4033-4479-ba7a-cdc160e0b6ad" Dec 03 19:46:06 crc kubenswrapper[4916]: E1203 19:46:06.866324 
4916 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" podUID="a5bc0003-390d-477e-8b21-f7fda61cb051" Dec 03 19:46:07 crc kubenswrapper[4916]: I1203 19:46:07.578676 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert\") pod \"infra-operator-controller-manager-57548d458d-dth7w\" (UID: \"a571d18b-686d-472d-9086-e192ec504db4\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:07 crc kubenswrapper[4916]: E1203 19:46:07.578833 4916 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 19:46:07 crc kubenswrapper[4916]: E1203 19:46:07.578911 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert podName:a571d18b-686d-472d-9086-e192ec504db4 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:11.578890273 +0000 UTC m=+987.541700539 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert") pod "infra-operator-controller-manager-57548d458d-dth7w" (UID: "a571d18b-686d-472d-9086-e192ec504db4") : secret "infra-operator-webhook-server-cert" not found Dec 03 19:46:07 crc kubenswrapper[4916]: I1203 19:46:07.882135 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" Dec 03 19:46:07 crc kubenswrapper[4916]: E1203 19:46:07.882433 4916 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:07 crc kubenswrapper[4916]: E1203 19:46:07.882522 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert podName:f356aae7-fed8-4f1b-a863-d7b47bcda904 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:11.88249338 +0000 UTC m=+987.845303646 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" (UID: "f356aae7-fed8-4f1b-a863-d7b47bcda904") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:08 crc kubenswrapper[4916]: I1203 19:46:08.288290 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:08 crc kubenswrapper[4916]: I1203 19:46:08.288859 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:08 crc kubenswrapper[4916]: E1203 19:46:08.288522 4916 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 19:46:08 crc kubenswrapper[4916]: E1203 19:46:08.289009 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:12.288979348 +0000 UTC m=+988.251789604 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "metrics-server-cert" not found Dec 03 19:46:08 crc kubenswrapper[4916]: E1203 19:46:08.289143 4916 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 19:46:08 crc kubenswrapper[4916]: E1203 19:46:08.289231 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:12.289204694 +0000 UTC m=+988.252015190 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "webhook-server-cert" not found Dec 03 19:46:11 crc kubenswrapper[4916]: I1203 19:46:11.642784 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert\") pod \"infra-operator-controller-manager-57548d458d-dth7w\" (UID: \"a571d18b-686d-472d-9086-e192ec504db4\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:11 crc kubenswrapper[4916]: E1203 19:46:11.643446 4916 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 19:46:11 crc kubenswrapper[4916]: E1203 19:46:11.643521 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert podName:a571d18b-686d-472d-9086-e192ec504db4 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:19.64350121 +0000 UTC m=+995.606311476 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert") pod "infra-operator-controller-manager-57548d458d-dth7w" (UID: "a571d18b-686d-472d-9086-e192ec504db4") : secret "infra-operator-webhook-server-cert" not found Dec 03 19:46:11 crc kubenswrapper[4916]: E1203 19:46:11.951949 4916 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:11 crc kubenswrapper[4916]: E1203 19:46:11.952034 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert podName:f356aae7-fed8-4f1b-a863-d7b47bcda904 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:19.952013458 +0000 UTC m=+995.914823724 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" (UID: "f356aae7-fed8-4f1b-a863-d7b47bcda904") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:11 crc kubenswrapper[4916]: I1203 19:46:11.951956 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" Dec 03 19:46:12 crc kubenswrapper[4916]: I1203 19:46:12.359440 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:12 crc kubenswrapper[4916]: I1203 19:46:12.359776 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:12 crc kubenswrapper[4916]: E1203 19:46:12.359720 4916 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 19:46:12 crc kubenswrapper[4916]: E1203 19:46:12.359932 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:20.359903153 +0000 UTC m=+996.322713489 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "metrics-server-cert" not found Dec 03 19:46:12 crc kubenswrapper[4916]: E1203 19:46:12.359966 4916 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 19:46:12 crc kubenswrapper[4916]: E1203 19:46:12.360019 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:20.360001145 +0000 UTC m=+996.322811411 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "webhook-server-cert" not found Dec 03 19:46:17 crc kubenswrapper[4916]: E1203 19:46:17.430260 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 03 19:46:17 crc kubenswrapper[4916]: E1203 19:46:17.431366 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4r4gx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-xvp6h_openstack-operators(8829329b-8de5-4a0d-bd48-9cb7338c2dd1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 19:46:18 crc kubenswrapper[4916]: E1203 19:46:18.113934 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 03 19:46:18 crc 
kubenswrapper[4916]: E1203 19:46:18.114346 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dplvh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-kndn4_openstack-operators(31355b8f-c9a1-4ddf-a97f-de6d4f506a67): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 19:46:18 crc kubenswrapper[4916]: E1203 19:46:18.724801 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea" Dec 03 19:46:18 crc kubenswrapper[4916]: E1203 19:46:18.725342 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r8ttb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-xn476_openstack-operators(d2c7e9c7-96cd-47bd-978a-c3fd41c74089): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 19:46:19 crc kubenswrapper[4916]: E1203 19:46:19.258443 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 03 19:46:19 crc kubenswrapper[4916]: E1203 19:46:19.258636 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m 
DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-n6msx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-m5s6k_openstack-operators(1a7e3254-35ac-48fa-8ab7-11e85c780369): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 19:46:19 crc kubenswrapper[4916]: E1203 19:46:19.260025 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k" podUID="1a7e3254-35ac-48fa-8ab7-11e85c780369" Dec 03 19:46:19 crc kubenswrapper[4916]: I1203 19:46:19.681443 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert\") pod \"infra-operator-controller-manager-57548d458d-dth7w\" (UID: \"a571d18b-686d-472d-9086-e192ec504db4\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:19 crc kubenswrapper[4916]: I1203 19:46:19.691386 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a571d18b-686d-472d-9086-e192ec504db4-cert\") pod \"infra-operator-controller-manager-57548d458d-dth7w\" (UID: \"a571d18b-686d-472d-9086-e192ec504db4\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:19 crc kubenswrapper[4916]: I1203 19:46:19.732988 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:19 crc kubenswrapper[4916]: I1203 19:46:19.973904 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4" event={"ID":"c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae","Type":"ContainerStarted","Data":"a7d5ae3a91fbb8d51fda5977108065060b95975e71071099726faf6a940f11d9"} Dec 03 19:46:19 crc kubenswrapper[4916]: I1203 19:46:19.978966 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6" event={"ID":"a83c7d54-9430-456f-b83e-abed5d9030b8","Type":"ContainerStarted","Data":"d08286ef65cceda035d1864bf59a770f86cf99273134abd3ec1d943a67a6c111"} Dec 03 19:46:19 crc kubenswrapper[4916]: I1203 19:46:19.983432 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5" event={"ID":"232cd6fc-5f1d-4398-ae7d-5c34f49843f3","Type":"ContainerStarted","Data":"56fe35a4e137fa06824d0b4dac4a913e0589630f63d02ce5902af2c5fc0f6742"} Dec 03 19:46:19 crc kubenswrapper[4916]: I1203 19:46:19.985056 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" Dec 03 19:46:19 crc kubenswrapper[4916]: E1203 19:46:19.985286 4916 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:19 crc kubenswrapper[4916]: E1203 19:46:19.985345 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert podName:f356aae7-fed8-4f1b-a863-d7b47bcda904 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:35.985328406 +0000 UTC m=+1011.948138672 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" (UID: "f356aae7-fed8-4f1b-a863-d7b47bcda904") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 19:46:20 crc kubenswrapper[4916]: I1203 19:46:20.005892 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f" event={"ID":"877f6f89-deed-4f06-adb1-cfa6b5254db2","Type":"ContainerStarted","Data":"0bc23400746da47056116826f6f950913ac58144d4755a0d420e64770ac75f6e"} Dec 03 19:46:20 crc kubenswrapper[4916]: I1203 19:46:20.025279 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg" event={"ID":"907117b8-0a09-440c-bb47-bfa09ccec80b","Type":"ContainerStarted","Data":"e6e601778b1c2361b5e426dd7c8e4acde106c8ab8bc40a8b580d0718d2a23b3f"} Dec 03 19:46:20 crc kubenswrapper[4916]: I1203 19:46:20.049929 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv" event={"ID":"b63d5bf7-8901-4bce-90d9-0006ae946230","Type":"ContainerStarted","Data":"ee096d622a55b4b425688fbe55f7c1dac9bd2580abc90f19e45fa3561ae8bd7b"} Dec 03 19:46:20 crc kubenswrapper[4916]: I1203 19:46:20.054338 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98" event={"ID":"04ce9fc2-2134-4d10-b3d8-764bca295eed","Type":"ContainerStarted","Data":"2be9028cd83c80228bd6031dddaa93b148385a4520ecaec8fa46a8d6320dc887"} Dec 03 19:46:20 crc kubenswrapper[4916]: I1203 19:46:20.064664 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2" event={"ID":"cf093783-d31b-42fc-a85f-fff6c35fdae8","Type":"ContainerStarted","Data":"a975e9dd61015692393f26f62965312f49c5329b90eae6a3ebbeb77bb8f6ac88"} Dec 03 19:46:20 crc kubenswrapper[4916]: I1203 19:46:20.073373 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t" event={"ID":"7303e0f2-e41d-4220-a72b-88e6b44b016c","Type":"ContainerStarted","Data":"33473655f799d4b76cd483869b6107b175e171abc6213faf11c59a733aaebc9f"} Dec 03 19:46:20 crc kubenswrapper[4916]: I1203 19:46:20.114956 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq" event={"ID":"dc81071a-3da1-4e63-b733-13e39ecfb823","Type":"ContainerStarted","Data":"07efb19472883ee42d66e6b9b5ce20560ea636af64404d54b2c62b05fc71a8b7"} Dec 03 19:46:20 crc kubenswrapper[4916]: E1203 19:46:20.124383 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k" podUID="1a7e3254-35ac-48fa-8ab7-11e85c780369" Dec 03 19:46:20 crc kubenswrapper[4916]: I1203 19:46:20.391973 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" 
(UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:20 crc kubenswrapper[4916]: E1203 19:46:20.392133 4916 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 19:46:20 crc kubenswrapper[4916]: E1203 19:46:20.392302 4916 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 19:46:20 crc kubenswrapper[4916]: E1203 19:46:20.392320 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:36.392297437 +0000 UTC m=+1012.355107703 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "metrics-server-cert" not found Dec 03 19:46:20 crc kubenswrapper[4916]: I1203 19:46:20.392258 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" Dec 03 19:46:20 crc kubenswrapper[4916]: E1203 19:46:20.392336 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs podName:e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2 nodeName:}" failed. No retries permitted until 2025-12-03 19:46:36.392326577 +0000 UTC m=+1012.355136843 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs") pod "openstack-operator-controller-manager-d8ff785c7-jbr4p" (UID: "e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2") : secret "webhook-server-cert" not found Dec 03 19:46:24 crc kubenswrapper[4916]: I1203 19:46:24.015279 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-dth7w"] Dec 03 19:46:24 crc kubenswrapper[4916]: W1203 19:46:24.447887 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda571d18b_686d_472d_9086_e192ec504db4.slice/crio-41dee30ee3ca4ec5c47eb1c53bdb88b2500887e5521299066aeea9f527eeddfc WatchSource:0}: Error finding container 41dee30ee3ca4ec5c47eb1c53bdb88b2500887e5521299066aeea9f527eeddfc: Status 404 returned error can't find the container with id 41dee30ee3ca4ec5c47eb1c53bdb88b2500887e5521299066aeea9f527eeddfc Dec 03 19:46:25 crc kubenswrapper[4916]: I1203 19:46:25.152421 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" event={"ID":"a571d18b-686d-472d-9086-e192ec504db4","Type":"ContainerStarted","Data":"41dee30ee3ca4ec5c47eb1c53bdb88b2500887e5521299066aeea9f527eeddfc"} Dec 03 19:46:27 crc kubenswrapper[4916]: E1203 19:46:27.082091 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" podUID="d2c7e9c7-96cd-47bd-978a-c3fd41c74089" Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.177065 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" event={"ID":"462898ce-79ab-4cd6-b05e-e19b65c80fa1","Type":"ContainerStarted","Data":"ce9ddecce466d3d296b5cd41f3cd8224f8aa985067723f25deb1c5f6e3853c27"} Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.180780 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" event={"ID":"a571d18b-686d-472d-9086-e192ec504db4","Type":"ContainerStarted","Data":"caea1816448f23e8021cc62c3601ffa44857c259653264615218cda77c705ba8"} Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.183073 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" event={"ID":"d2c7e9c7-96cd-47bd-978a-c3fd41c74089","Type":"ContainerStarted","Data":"680104bd1fa978f5830e53f7a9bd762466daa5f6c99e9214927c57089a944cfe"} Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.186335 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" event={"ID":"fc0e679d-4033-4479-ba7a-cdc160e0b6ad","Type":"ContainerStarted","Data":"78d43dacee37189cb3cdcd5a4d766d8958c118c081e5bb67ac3baf476207b425"} Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.188367 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" event={"ID":"6f4635b6-2410-4d5f-a7c9-3cf0a04739f7","Type":"ContainerStarted","Data":"9a559380c66328b86d890bd19553473f86757bece0935c25c739247582c77b3d"} Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 
19:46:27.190710 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t" event={"ID":"7303e0f2-e41d-4220-a72b-88e6b44b016c","Type":"ContainerStarted","Data":"8f9a58b8f97e9dd77587965765008e52b986831da82929aa3185d2eaa7b8a6c5"} Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.191326 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t" Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.196180 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t" Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.196330 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" event={"ID":"463726a8-9ad4-486b-b5b8-166fed3a6190","Type":"ContainerStarted","Data":"647a259ed826b8ea3d937ba617ffaa4bca354c2f972b99a77681633866c325d4"} Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.221427 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" event={"ID":"a5bc0003-390d-477e-8b21-f7fda61cb051","Type":"ContainerStarted","Data":"7cbf4b002f58b45ce58410c76a392c7be74bde3befb79e5619783b94f5206673"} Dec 03 19:46:27 crc kubenswrapper[4916]: E1203 19:46:27.237903 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" podUID="d2c7e9c7-96cd-47bd-978a-c3fd41c74089" Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.243076 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" event={"ID":"251c39c1-e63c-4772-a0e6-88528867a64d","Type":"ContainerStarted","Data":"aa3d953a8fd713d3a8e7ee0a259427eaadcbaf09efe8e0e835448c0160fdee06"} Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.277970 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg" event={"ID":"907117b8-0a09-440c-bb47-bfa09ccec80b","Type":"ContainerStarted","Data":"28d4cc436200e145b635c253cf298b6afd77a45fc25309e9ad57ff66c1a16bf9"} Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.283727 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg" Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.290865 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg" Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.300667 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-jltdg" podStartSLOduration=2.7870670459999998 podStartE2EDuration="24.30065156s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.174453208 +0000 UTC m=+981.137263474" lastFinishedPulling="2025-12-03 19:46:26.688037722 +0000 UTC m=+1002.650847988" observedRunningTime="2025-12-03 
19:46:27.297130496 +0000 UTC m=+1003.259940762" watchObservedRunningTime="2025-12-03 19:46:27.30065156 +0000 UTC m=+1003.263461826" Dec 03 19:46:27 crc kubenswrapper[4916]: I1203 19:46:27.301956 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-sd86t" podStartSLOduration=3.18327987 podStartE2EDuration="24.301950855s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.639278699 +0000 UTC m=+981.602088955" lastFinishedPulling="2025-12-03 19:46:26.757949644 +0000 UTC m=+1002.720759940" observedRunningTime="2025-12-03 19:46:27.217072774 +0000 UTC m=+1003.179883040" watchObservedRunningTime="2025-12-03 19:46:27.301950855 +0000 UTC m=+1003.264761121" Dec 03 19:46:27 crc kubenswrapper[4916]: E1203 19:46:27.867944 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4" podUID="31355b8f-c9a1-4ddf-a97f-de6d4f506a67" Dec 03 19:46:27 crc kubenswrapper[4916]: E1203 19:46:27.914130 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h" podUID="8829329b-8de5-4a0d-bd48-9cb7338c2dd1" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.287323 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98" event={"ID":"04ce9fc2-2134-4d10-b3d8-764bca295eed","Type":"ContainerStarted","Data":"ffb78e9c12e12abf03a65826c9067329ae8e92bbed61078ce29065438acc390a"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.287546 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.289172 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" event={"ID":"462898ce-79ab-4cd6-b05e-e19b65c80fa1","Type":"ContainerStarted","Data":"25866d58417350a893fca0095acc0a7aacfcafc5119bd901dea9b62d0df76bb6"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.289940 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.291226 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.291530 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq" event={"ID":"dc81071a-3da1-4e63-b733-13e39ecfb823","Type":"ContainerStarted","Data":"55a1a0ef2fe9dc75ab5e79ea2a47810dcb5315bb99449640ba4840ba2c16398b"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.291739 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.297014 4916 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" event={"ID":"463726a8-9ad4-486b-b5b8-166fed3a6190","Type":"ContainerStarted","Data":"b80ffe5d1809c6e7f9382dc3ac20d3d81f1e28d90eb2bcabdae95c436834cb8d"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.297146 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.297264 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.301478 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" event={"ID":"251c39c1-e63c-4772-a0e6-88528867a64d","Type":"ContainerStarted","Data":"652b312d67e8df1e17247f65c78dafb8bd65981104ac81e54764a090e77ba521"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.302014 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.305865 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6" event={"ID":"a83c7d54-9430-456f-b83e-abed5d9030b8","Type":"ContainerStarted","Data":"9648d707b4796fc98a5fcf0a7a135572a442941c923cf12d5f2983dcf6d2b6dc"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.308124 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.309799 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.311016 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5" event={"ID":"232cd6fc-5f1d-4398-ae7d-5c34f49843f3","Type":"ContainerStarted","Data":"eee71be3405ef35a6e108c3f3f0ced295840efcd1b84da7790159eb403f6cc92"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.312931 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.313886 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.313866 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-7bs98" podStartSLOduration=2.457522274 podStartE2EDuration="24.313850208s" podCreationTimestamp="2025-12-03 19:46:04 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.62542024 +0000 UTC m=+981.588230506" lastFinishedPulling="2025-12-03 19:46:27.481748174 +0000 UTC m=+1003.444558440" observedRunningTime="2025-12-03 19:46:28.309895673 +0000 UTC m=+1004.272705939" watchObservedRunningTime="2025-12-03 19:46:28.313850208 +0000 UTC m=+1004.276660474" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.318273 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv" event={"ID":"b63d5bf7-8901-4bce-90d9-0006ae946230","Type":"ContainerStarted","Data":"9f8df6ad2d61f87bb4cced60631374cea505a7bbeaf953d19348f9c609d4b712"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.319699 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.324258 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2" event={"ID":"cf093783-d31b-42fc-a85f-fff6c35fdae8","Type":"ContainerStarted","Data":"1623e173ea3ca84ef76ca4cf6e8bbe022da22d474439d340587cd585ce14f79e"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.324494 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.324658 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.329666 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" event={"ID":"a571d18b-686d-472d-9086-e192ec504db4","Type":"ContainerStarted","Data":"ec94c643bd79a3338dced5e2b5e74c5c0f35267d432cfd4ede9b1257cade7d73"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.329799 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.331005 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" event={"ID":"6f4635b6-2410-4d5f-a7c9-3cf0a04739f7","Type":"ContainerStarted","Data":"b0a3dd054964133efac7fc0a62fc16ee9b083d93cd9de77cae7be450fb3744bd"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.331504 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.332120 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.333983 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h" event={"ID":"8829329b-8de5-4a0d-bd48-9cb7338c2dd1","Type":"ContainerStarted","Data":"fd59b90ff2e9e1b9ae01500275f22f1ee0e9c4cf95541b05f7537fcf35e1b0a4"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.335341 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k" podStartSLOduration=4.882875006 podStartE2EDuration="24.33532494s" podCreationTimestamp="2025-12-03 19:46:04 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.63895208 +0000 UTC m=+981.601762346" lastFinishedPulling="2025-12-03 19:46:25.091402014 +0000 UTC m=+1001.054212280" observedRunningTime="2025-12-03 19:46:28.332779872 +0000 UTC m=+1004.295590148" watchObservedRunningTime="2025-12-03 19:46:28.33532494 +0000 UTC 
m=+1004.298135206" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.343205 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4" event={"ID":"c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae","Type":"ContainerStarted","Data":"05975a37b3ba396170450449661fe116663002cfcc144025986387548a789988"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.343908 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.349665 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.350034 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4" event={"ID":"31355b8f-c9a1-4ddf-a97f-de6d4f506a67","Type":"ContainerStarted","Data":"1ec2a7397b819adead88bd4e212bf46acfec2239b9e8167e6cf633479d6ae411"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.352186 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" event={"ID":"fc0e679d-4033-4479-ba7a-cdc160e0b6ad","Type":"ContainerStarted","Data":"497453f494cbd706cf428055061f2c00912a8c36d097bfa3fde873150da82f7e"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.360737 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.365655 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f" event={"ID":"877f6f89-deed-4f06-adb1-cfa6b5254db2","Type":"ContainerStarted","Data":"1d451a7512c06668b18c96789bae7b745abb15da510888a1ec8c3b7a328405ed"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.366054 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.370031 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.379364 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" event={"ID":"a5bc0003-390d-477e-8b21-f7fda61cb051","Type":"ContainerStarted","Data":"4bc39345735fcd16e6ce6efe6c87ece154922ad732d46c321cdda40b8f44bbc1"} Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.380012 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" Dec 03 19:46:28 crc kubenswrapper[4916]: E1203 19:46:28.390909 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" podUID="d2c7e9c7-96cd-47bd-978a-c3fd41c74089" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 
19:46:28.448327 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-7q6dq" podStartSLOduration=4.031065961 podStartE2EDuration="25.448301669s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.614854468 +0000 UTC m=+981.577664734" lastFinishedPulling="2025-12-03 19:46:27.032090176 +0000 UTC m=+1002.994900442" observedRunningTime="2025-12-03 19:46:28.382153277 +0000 UTC m=+1004.344963563" watchObservedRunningTime="2025-12-03 19:46:28.448301669 +0000 UTC m=+1004.411111935" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.480331 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg" podStartSLOduration=5.724232892 podStartE2EDuration="25.480303592s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.662064606 +0000 UTC m=+981.624874872" lastFinishedPulling="2025-12-03 19:46:25.418135286 +0000 UTC m=+1001.380945572" observedRunningTime="2025-12-03 19:46:28.425968055 +0000 UTC m=+1004.388778321" watchObservedRunningTime="2025-12-03 19:46:28.480303592 +0000 UTC m=+1004.443113858" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.487672 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4" podStartSLOduration=5.9298886490000005 podStartE2EDuration="25.487652618s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.648280979 +0000 UTC m=+981.611091245" lastFinishedPulling="2025-12-03 19:46:25.206044948 +0000 UTC m=+1001.168855214" observedRunningTime="2025-12-03 19:46:28.449932373 +0000 UTC m=+1004.412742639" watchObservedRunningTime="2025-12-03 19:46:28.487652618 +0000 UTC m=+1004.450462884" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.518174 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-x6tb6" podStartSLOduration=2.919981376 podStartE2EDuration="25.51815793s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:04.88621569 +0000 UTC m=+980.849025956" lastFinishedPulling="2025-12-03 19:46:27.484392244 +0000 UTC m=+1003.447202510" observedRunningTime="2025-12-03 19:46:28.515093099 +0000 UTC m=+1004.477903365" watchObservedRunningTime="2025-12-03 19:46:28.51815793 +0000 UTC m=+1004.480968196" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.573479 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-nsfm2" podStartSLOduration=2.945595048 podStartE2EDuration="25.573461723s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:04.95190856 +0000 UTC m=+980.914718826" lastFinishedPulling="2025-12-03 19:46:27.579775235 +0000 UTC m=+1003.542585501" observedRunningTime="2025-12-03 19:46:28.543459354 +0000 UTC m=+1004.506269620" watchObservedRunningTime="2025-12-03 19:46:28.573461723 +0000 UTC m=+1004.536271979" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.656274 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-csdvv" podStartSLOduration=3.807060515 podStartE2EDuration="25.656257819s" 
podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.17791947 +0000 UTC m=+981.140729726" lastFinishedPulling="2025-12-03 19:46:27.027116774 +0000 UTC m=+1002.989927030" observedRunningTime="2025-12-03 19:46:28.646735845 +0000 UTC m=+1004.609546111" watchObservedRunningTime="2025-12-03 19:46:28.656257819 +0000 UTC m=+1004.619068085" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.670368 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm" podStartSLOduration=6.282565713 podStartE2EDuration="25.670350144s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.645243238 +0000 UTC m=+981.608053504" lastFinishedPulling="2025-12-03 19:46:25.033027669 +0000 UTC m=+1000.995837935" observedRunningTime="2025-12-03 19:46:28.668923396 +0000 UTC m=+1004.631733652" watchObservedRunningTime="2025-12-03 19:46:28.670350144 +0000 UTC m=+1004.633160410" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.687171 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-6x48f" podStartSLOduration=4.037363719 podStartE2EDuration="25.686073313s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.190258318 +0000 UTC m=+981.153068584" lastFinishedPulling="2025-12-03 19:46:26.838967892 +0000 UTC m=+1002.801778178" observedRunningTime="2025-12-03 19:46:28.680922396 +0000 UTC m=+1004.643732662" watchObservedRunningTime="2025-12-03 19:46:28.686073313 +0000 UTC m=+1004.648883579" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.711398 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s" podStartSLOduration=5.212142787 podStartE2EDuration="24.711379427s" podCreationTimestamp="2025-12-03 19:46:04 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.635030606 +0000 UTC m=+981.597840872" lastFinishedPulling="2025-12-03 19:46:25.134267246 +0000 UTC m=+1001.097077512" observedRunningTime="2025-12-03 19:46:28.711057148 +0000 UTC m=+1004.673867414" watchObservedRunningTime="2025-12-03 19:46:28.711379427 +0000 UTC m=+1004.674189693" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.731329 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-6pzj5" podStartSLOduration=2.953363605 podStartE2EDuration="25.731311808s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.013160631 +0000 UTC m=+980.975970897" lastFinishedPulling="2025-12-03 19:46:27.791108834 +0000 UTC m=+1003.753919100" observedRunningTime="2025-12-03 19:46:28.72424803 +0000 UTC m=+1004.687058296" watchObservedRunningTime="2025-12-03 19:46:28.731311808 +0000 UTC m=+1004.694122074" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.773094 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc" podStartSLOduration=5.413590203 podStartE2EDuration="24.77308135s" podCreationTimestamp="2025-12-03 19:46:04 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.673544732 +0000 UTC m=+981.636354998" lastFinishedPulling="2025-12-03 19:46:25.033035879 +0000 UTC m=+1000.995846145" observedRunningTime="2025-12-03 
19:46:28.772201237 +0000 UTC m=+1004.735011503" watchObservedRunningTime="2025-12-03 19:46:28.77308135 +0000 UTC m=+1004.735891616" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.798844 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w" podStartSLOduration=23.546921343 podStartE2EDuration="25.798827325s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:24.449911307 +0000 UTC m=+1000.412721573" lastFinishedPulling="2025-12-03 19:46:26.701817279 +0000 UTC m=+1002.664627555" observedRunningTime="2025-12-03 19:46:28.79558257 +0000 UTC m=+1004.758392836" watchObservedRunningTime="2025-12-03 19:46:28.798827325 +0000 UTC m=+1004.761637591" Dec 03 19:46:28 crc kubenswrapper[4916]: I1203 19:46:28.827783 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-cjms4" podStartSLOduration=4.528625964 podStartE2EDuration="25.827766536s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.63104447 +0000 UTC m=+981.593854736" lastFinishedPulling="2025-12-03 19:46:26.930185022 +0000 UTC m=+1002.892995308" observedRunningTime="2025-12-03 19:46:28.821989732 +0000 UTC m=+1004.784799998" watchObservedRunningTime="2025-12-03 19:46:28.827766536 +0000 UTC m=+1004.790576802" Dec 03 19:46:29 crc kubenswrapper[4916]: I1203 19:46:29.386466 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h" event={"ID":"8829329b-8de5-4a0d-bd48-9cb7338c2dd1","Type":"ContainerStarted","Data":"234ead35ae2ae4c5e3f99129c369eb853c5767ec24dafaff30cfad2b8f9974f5"} Dec 03 19:46:29 crc kubenswrapper[4916]: I1203 19:46:29.386778 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h" Dec 03 19:46:29 crc kubenswrapper[4916]: I1203 19:46:29.388396 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4" event={"ID":"31355b8f-c9a1-4ddf-a97f-de6d4f506a67","Type":"ContainerStarted","Data":"c84f2d2d385dabdb29d22cacdb600cf786ec4b54387c071dc3004168a776e258"} Dec 03 19:46:29 crc kubenswrapper[4916]: I1203 19:46:29.407781 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h" podStartSLOduration=3.188053125 podStartE2EDuration="26.407765315s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.565994927 +0000 UTC m=+981.528805193" lastFinishedPulling="2025-12-03 19:46:28.785707117 +0000 UTC m=+1004.748517383" observedRunningTime="2025-12-03 19:46:29.403578054 +0000 UTC m=+1005.366388310" watchObservedRunningTime="2025-12-03 19:46:29.407765315 +0000 UTC m=+1005.370575581" Dec 03 19:46:29 crc kubenswrapper[4916]: I1203 19:46:29.427595 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4" podStartSLOduration=2.659096515 podStartE2EDuration="26.427558822s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.018261157 +0000 UTC m=+980.981071423" lastFinishedPulling="2025-12-03 19:46:28.786723464 +0000 UTC m=+1004.749533730" observedRunningTime="2025-12-03 
Dec 03 19:46:30 crc kubenswrapper[4916]: I1203 19:46:30.395002 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4"
Dec 03 19:46:34 crc kubenswrapper[4916]: I1203 19:46:34.109462 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-kndn4"
Dec 03 19:46:34 crc kubenswrapper[4916]: I1203 19:46:34.315320 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-mllvg"
Dec 03 19:46:34 crc kubenswrapper[4916]: I1203 19:46:34.399550 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-wp7lm"
Dec 03 19:46:34 crc kubenswrapper[4916]: I1203 19:46:34.405176 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-29mr4"
Dec 03 19:46:34 crc kubenswrapper[4916]: I1203 19:46:34.519055 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-xvp6h"
Dec 03 19:46:34 crc kubenswrapper[4916]: I1203 19:46:34.618949 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-65b6f7cdd5-fbb7s"
Dec 03 19:46:34 crc kubenswrapper[4916]: I1203 19:46:34.640731 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-2txxc"
Dec 03 19:46:34 crc kubenswrapper[4916]: I1203 19:46:34.656246 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7vz8k"
Dec 03 19:46:36 crc kubenswrapper[4916]: I1203 19:46:36.065708 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs"
Dec 03 19:46:36 crc kubenswrapper[4916]: I1203 19:46:36.072814 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f356aae7-fed8-4f1b-a863-d7b47bcda904-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs\" (UID: \"f356aae7-fed8-4f1b-a863-d7b47bcda904\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs"
Dec 03 19:46:36 crc kubenswrapper[4916]: I1203 19:46:36.248736 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs"
Dec 03 19:46:36 crc kubenswrapper[4916]: I1203 19:46:36.470964 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p"
Dec 03 19:46:36 crc kubenswrapper[4916]: I1203 19:46:36.471352 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p"
Dec 03 19:46:36 crc kubenswrapper[4916]: I1203 19:46:36.482805 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-metrics-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p"
Dec 03 19:46:36 crc kubenswrapper[4916]: I1203 19:46:36.492561 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2-webhook-certs\") pod \"openstack-operator-controller-manager-d8ff785c7-jbr4p\" (UID: \"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2\") " pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p"
Dec 03 19:46:36 crc kubenswrapper[4916]: I1203 19:46:36.714106 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p"
Dec 03 19:46:36 crc kubenswrapper[4916]: I1203 19:46:36.717052 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs"]
Dec 03 19:46:36 crc kubenswrapper[4916]: W1203 19:46:36.720418 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf356aae7_fed8_4f1b_a863_d7b47bcda904.slice/crio-ec692a3552755ef7317cb93560be92f36d0106a83872fc1351a7822b9006571a WatchSource:0}: Error finding container ec692a3552755ef7317cb93560be92f36d0106a83872fc1351a7822b9006571a: Status 404 returned error can't find the container with id ec692a3552755ef7317cb93560be92f36d0106a83872fc1351a7822b9006571a
Dec 03 19:46:37 crc kubenswrapper[4916]: I1203 19:46:37.018622 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p"]
Dec 03 19:46:37 crc kubenswrapper[4916]: W1203 19:46:37.025148 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode7ce3ac9_9ba0_4991_a95b_c33fca5a2ed2.slice/crio-7b8a691ad4380a5c20ee4df2f078e4c976adac7a99f5be715f3f906719bf28fb WatchSource:0}: Error finding container 7b8a691ad4380a5c20ee4df2f078e4c976adac7a99f5be715f3f906719bf28fb: Status 404 returned error can't find the container with id 7b8a691ad4380a5c20ee4df2f078e4c976adac7a99f5be715f3f906719bf28fb
Dec 03 19:46:37 crc kubenswrapper[4916]: I1203 19:46:37.460671 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" event={"ID":"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2","Type":"ContainerStarted","Data":"7b8a691ad4380a5c20ee4df2f078e4c976adac7a99f5be715f3f906719bf28fb"}
Dec 03 19:46:37 crc kubenswrapper[4916]: I1203 19:46:37.464042 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" event={"ID":"f356aae7-fed8-4f1b-a863-d7b47bcda904","Type":"ContainerStarted","Data":"ec692a3552755ef7317cb93560be92f36d0106a83872fc1351a7822b9006571a"}
Dec 03 19:46:39 crc kubenswrapper[4916]: I1203 19:46:39.739373 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-dth7w"
Dec 03 19:46:45 crc kubenswrapper[4916]: I1203 19:46:45.548929 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k" event={"ID":"1a7e3254-35ac-48fa-8ab7-11e85c780369","Type":"ContainerStarted","Data":"e6615fc8c4a0ef5f5173c620be80f99ecc6208dc7024dbeebaaa9f209441eca4"}
Dec 03 19:46:45 crc kubenswrapper[4916]: I1203 19:46:45.550308 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" event={"ID":"e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2","Type":"ContainerStarted","Data":"f8daa48aa7ba29025e3837191b27d735c03f944713af6d513a8105922440ebe1"}
Dec 03 19:46:45 crc kubenswrapper[4916]: I1203 19:46:45.550708 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p"
Dec 03 19:46:45 crc kubenswrapper[4916]: I1203 19:46:45.551979 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" event={"ID":"f356aae7-fed8-4f1b-a863-d7b47bcda904","Type":"ContainerStarted","Data":"8a1e7365c33ca34b2ab1c90ae3a98623fa015e095482616f23fc0d744bd023c6"}
Dec 03 19:46:45 crc kubenswrapper[4916]: I1203 19:46:45.591910 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p" podStartSLOduration=41.591892272 podStartE2EDuration="41.591892272s" podCreationTimestamp="2025-12-03 19:46:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:46:45.590274859 +0000 UTC m=+1021.553085125" watchObservedRunningTime="2025-12-03 19:46:45.591892272 +0000 UTC m=+1021.554702538"
Dec 03 19:46:46 crc kubenswrapper[4916]: I1203 19:46:46.560011 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" event={"ID":"d2c7e9c7-96cd-47bd-978a-c3fd41c74089","Type":"ContainerStarted","Data":"1384beb39bbe1a7c13fb8ca7525f87457ff3b8c3454044681cb4e362fb7b1ed0"}
Dec 03 19:46:46 crc kubenswrapper[4916]: I1203 19:46:46.560413 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476"
Dec 03 19:46:46 crc kubenswrapper[4916]: I1203 19:46:46.561820 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" event={"ID":"f356aae7-fed8-4f1b-a863-d7b47bcda904","Type":"ContainerStarted","Data":"5ad3e03a7ff0573fec6346698219b2127a0fed41f6c1a31dc874763494f29a81"}
Dec 03 19:46:46 crc kubenswrapper[4916]: I1203 19:46:46.562059 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs"
Dec 03 19:46:46 crc kubenswrapper[4916]: I1203 19:46:46.579419 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476" podStartSLOduration=2.857618315 podStartE2EDuration="43.579400446s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:04.839175727 +0000 UTC m=+980.801985993" lastFinishedPulling="2025-12-03 19:46:45.560957848 +0000 UTC m=+1021.523768124" observedRunningTime="2025-12-03 19:46:46.57466937 +0000 UTC m=+1022.537479656" watchObservedRunningTime="2025-12-03 19:46:46.579400446 +0000 UTC m=+1022.542210722"
Dec 03 19:46:46 crc kubenswrapper[4916]: I1203 19:46:46.590820 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-m5s6k" podStartSLOduration=2.898411739 podStartE2EDuration="42.5908023s" podCreationTimestamp="2025-12-03 19:46:04 +0000 UTC" firstStartedPulling="2025-12-03 19:46:05.621404443 +0000 UTC m=+981.584214709" lastFinishedPulling="2025-12-03 19:46:45.313795004 +0000 UTC m=+1021.276605270" observedRunningTime="2025-12-03 19:46:46.588626812 +0000 UTC m=+1022.551437088" watchObservedRunningTime="2025-12-03 19:46:46.5908023 +0000 UTC m=+1022.553612566"
Dec 03 19:46:46 crc kubenswrapper[4916]: I1203 19:46:46.627897 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs" podStartSLOduration=35.028770228 podStartE2EDuration="43.627877658s" podCreationTimestamp="2025-12-03 19:46:03 +0000 UTC" firstStartedPulling="2025-12-03 19:46:36.732878839 +0000 UTC m=+1012.695689135" lastFinishedPulling="2025-12-03 19:46:45.331986289 +0000 UTC m=+1021.294796565" observedRunningTime="2025-12-03 19:46:46.621164559 +0000 UTC m=+1022.583974865" watchObservedRunningTime="2025-12-03 19:46:46.627877658 +0000 UTC m=+1022.590687934"
Dec 03 19:46:54 crc kubenswrapper[4916]: I1203 19:46:54.007986 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-xn476"
Dec 03 19:46:56 crc kubenswrapper[4916]: I1203 19:46:56.259050 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs"
Dec 03 19:46:56 crc kubenswrapper[4916]: I1203 19:46:56.723616 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-d8ff785c7-jbr4p"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.512369 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k9wvr"]
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.516521 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k9wvr"]
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.516639 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.523036 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.523192 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-vfwxk"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.523237 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.523865 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.569494 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ckgmb"]
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.570550 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.574285 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.579647 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ckgmb"]
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.630143 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/685fd815-e94b-4130-a1cd-c566bb975525-config\") pod \"dnsmasq-dns-675f4bcbfc-k9wvr\" (UID: \"685fd815-e94b-4130-a1cd-c566bb975525\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.630194 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-config\") pod \"dnsmasq-dns-78dd6ddcc-ckgmb\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.630220 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-ckgmb\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.630275 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hhnv\" (UniqueName: \"kubernetes.io/projected/249477db-bbd7-4c96-a0b7-e36c3243598d-kube-api-access-7hhnv\") pod \"dnsmasq-dns-78dd6ddcc-ckgmb\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.630309 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv27p\" (UniqueName: \"kubernetes.io/projected/685fd815-e94b-4130-a1cd-c566bb975525-kube-api-access-zv27p\") pod \"dnsmasq-dns-675f4bcbfc-k9wvr\" (UID: \"685fd815-e94b-4130-a1cd-c566bb975525\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.731405 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv27p\" (UniqueName: \"kubernetes.io/projected/685fd815-e94b-4130-a1cd-c566bb975525-kube-api-access-zv27p\") pod \"dnsmasq-dns-675f4bcbfc-k9wvr\" (UID: \"685fd815-e94b-4130-a1cd-c566bb975525\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.731493 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/685fd815-e94b-4130-a1cd-c566bb975525-config\") pod \"dnsmasq-dns-675f4bcbfc-k9wvr\" (UID: \"685fd815-e94b-4130-a1cd-c566bb975525\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.731535 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-config\") pod \"dnsmasq-dns-78dd6ddcc-ckgmb\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.731586 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-ckgmb\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.731645 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hhnv\" (UniqueName: \"kubernetes.io/projected/249477db-bbd7-4c96-a0b7-e36c3243598d-kube-api-access-7hhnv\") pod \"dnsmasq-dns-78dd6ddcc-ckgmb\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.732593 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-config\") pod \"dnsmasq-dns-78dd6ddcc-ckgmb\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.732790 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-ckgmb\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.733192 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/685fd815-e94b-4130-a1cd-c566bb975525-config\") pod \"dnsmasq-dns-675f4bcbfc-k9wvr\" (UID: \"685fd815-e94b-4130-a1cd-c566bb975525\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.754963 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hhnv\" (UniqueName: \"kubernetes.io/projected/249477db-bbd7-4c96-a0b7-e36c3243598d-kube-api-access-7hhnv\") pod \"dnsmasq-dns-78dd6ddcc-ckgmb\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.764065 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv27p\" (UniqueName: \"kubernetes.io/projected/685fd815-e94b-4130-a1cd-c566bb975525-kube-api-access-zv27p\") pod \"dnsmasq-dns-675f4bcbfc-k9wvr\" (UID: \"685fd815-e94b-4130-a1cd-c566bb975525\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.851600 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr"
Dec 03 19:47:14 crc kubenswrapper[4916]: I1203 19:47:14.892138 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb"
Dec 03 19:47:15 crc kubenswrapper[4916]: I1203 19:47:15.312001 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k9wvr"]
Dec 03 19:47:15 crc kubenswrapper[4916]: I1203 19:47:15.320760 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 19:47:15 crc kubenswrapper[4916]: I1203 19:47:15.383389 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ckgmb"]
Dec 03 19:47:15 crc kubenswrapper[4916]: W1203 19:47:15.390157 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod249477db_bbd7_4c96_a0b7_e36c3243598d.slice/crio-b1408623074e0dba56dafcb65e92c875028fb31bc03746c439effe029627d6c4 WatchSource:0}: Error finding container b1408623074e0dba56dafcb65e92c875028fb31bc03746c439effe029627d6c4: Status 404 returned error can't find the container with id b1408623074e0dba56dafcb65e92c875028fb31bc03746c439effe029627d6c4
Dec 03 19:47:15 crc kubenswrapper[4916]: I1203 19:47:15.815104 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb" event={"ID":"249477db-bbd7-4c96-a0b7-e36c3243598d","Type":"ContainerStarted","Data":"b1408623074e0dba56dafcb65e92c875028fb31bc03746c439effe029627d6c4"}
Dec 03 19:47:15 crc kubenswrapper[4916]: I1203 19:47:15.816829 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr" event={"ID":"685fd815-e94b-4130-a1cd-c566bb975525","Type":"ContainerStarted","Data":"2990c186fc364d1f6971e369616a020851b3dc0036a9b86c71a494e4fc42a525"}
Dec 03 19:47:16 crc kubenswrapper[4916]: I1203 19:47:16.159187 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 19:47:16 crc kubenswrapper[4916]: I1203 19:47:16.159276 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.525301 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k9wvr"]
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.550244 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-r22qg"]
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.551287 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-r22qg"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.574073 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-r22qg"]
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.673879 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-dns-svc\") pod \"dnsmasq-dns-666b6646f7-r22qg\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " pod="openstack/dnsmasq-dns-666b6646f7-r22qg"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.674028 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-config\") pod \"dnsmasq-dns-666b6646f7-r22qg\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " pod="openstack/dnsmasq-dns-666b6646f7-r22qg"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.674193 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6vr8\" (UniqueName: \"kubernetes.io/projected/5b4a627a-cfa7-4e96-9563-5e859ad5525a-kube-api-access-d6vr8\") pod \"dnsmasq-dns-666b6646f7-r22qg\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " pod="openstack/dnsmasq-dns-666b6646f7-r22qg"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.775302 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-config\") pod \"dnsmasq-dns-666b6646f7-r22qg\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " pod="openstack/dnsmasq-dns-666b6646f7-r22qg"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.775391 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6vr8\" (UniqueName: \"kubernetes.io/projected/5b4a627a-cfa7-4e96-9563-5e859ad5525a-kube-api-access-d6vr8\") pod \"dnsmasq-dns-666b6646f7-r22qg\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " pod="openstack/dnsmasq-dns-666b6646f7-r22qg"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.775460 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-dns-svc\") pod \"dnsmasq-dns-666b6646f7-r22qg\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " pod="openstack/dnsmasq-dns-666b6646f7-r22qg"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.776632 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-dns-svc\") pod \"dnsmasq-dns-666b6646f7-r22qg\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " pod="openstack/dnsmasq-dns-666b6646f7-r22qg"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.777055 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-config\") pod \"dnsmasq-dns-666b6646f7-r22qg\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " pod="openstack/dnsmasq-dns-666b6646f7-r22qg"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.894717 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6vr8\" (UniqueName: \"kubernetes.io/projected/5b4a627a-cfa7-4e96-9563-5e859ad5525a-kube-api-access-d6vr8\") pod \"dnsmasq-dns-666b6646f7-r22qg\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " pod="openstack/dnsmasq-dns-666b6646f7-r22qg"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.969808 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ckgmb"]
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.987284 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-f7sfp"]
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.988478 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp"
Dec 03 19:47:17 crc kubenswrapper[4916]: I1203 19:47:17.996700 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-f7sfp"]
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.093941 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-f7sfp\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.094017 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wkwm\" (UniqueName: \"kubernetes.io/projected/b5c9236a-7a8f-4fd9-9831-2370c10ab466-kube-api-access-8wkwm\") pod \"dnsmasq-dns-57d769cc4f-f7sfp\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.094055 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-config\") pod \"dnsmasq-dns-57d769cc4f-f7sfp\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.176221 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-r22qg"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.195199 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-f7sfp\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.195251 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wkwm\" (UniqueName: \"kubernetes.io/projected/b5c9236a-7a8f-4fd9-9831-2370c10ab466-kube-api-access-8wkwm\") pod \"dnsmasq-dns-57d769cc4f-f7sfp\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.195283 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-config\") pod \"dnsmasq-dns-57d769cc4f-f7sfp\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.196173 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-config\") pod \"dnsmasq-dns-57d769cc4f-f7sfp\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.196293 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-f7sfp\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.213499 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wkwm\" (UniqueName: \"kubernetes.io/projected/b5c9236a-7a8f-4fd9-9831-2370c10ab466-kube-api-access-8wkwm\") pod \"dnsmasq-dns-57d769cc4f-f7sfp\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.313069 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.641316 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-r22qg"]
Dec 03 19:47:18 crc kubenswrapper[4916]: W1203 19:47:18.649676 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b4a627a_cfa7_4e96_9563_5e859ad5525a.slice/crio-d27f369993a95d9e0dd8ec480a831bad9c576ce9e5c340226d6f1b000a4beb3f WatchSource:0}: Error finding container d27f369993a95d9e0dd8ec480a831bad9c576ce9e5c340226d6f1b000a4beb3f: Status 404 returned error can't find the container with id d27f369993a95d9e0dd8ec480a831bad9c576ce9e5c340226d6f1b000a4beb3f
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.795250 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-f7sfp"]
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.854664 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.856613 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.860070 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.860324 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.860546 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.862458 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.862678 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.865261 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-xngf2"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.865289 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.870438 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.882780 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp" event={"ID":"b5c9236a-7a8f-4fd9-9831-2370c10ab466","Type":"ContainerStarted","Data":"7b497c9481b3d206b67e68b178e02aa0865cf22ba62a85dd68d1df695c23bea6"}
Dec 03 19:47:18 crc kubenswrapper[4916]: I1203 19:47:18.884590 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-r22qg" event={"ID":"5b4a627a-cfa7-4e96-9563-5e859ad5525a","Type":"ContainerStarted","Data":"d27f369993a95d9e0dd8ec480a831bad9c576ce9e5c340226d6f1b000a4beb3f"}
Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.005382 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6ffb0836-d978-4f53-9a48-1174b647eeaf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0"
\"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.005422 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lf2z\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-kube-api-access-7lf2z\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.005448 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.005475 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.005496 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.005513 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.005542 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-config-data\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.005558 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.005610 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.005631 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6ffb0836-d978-4f53-9a48-1174b647eeaf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 
crc kubenswrapper[4916]: I1203 19:47:19.005821 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.107769 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6ffb0836-d978-4f53-9a48-1174b647eeaf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.107822 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lf2z\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-kube-api-access-7lf2z\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.107854 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.107880 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.107905 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.107926 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.107955 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-config-data\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.107973 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.107994 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.108011 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6ffb0836-d978-4f53-9a48-1174b647eeaf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.108036 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.108768 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.109422 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.109553 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.110171 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-config-data\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.110212 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.113659 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.118831 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6ffb0836-d978-4f53-9a48-1174b647eeaf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.118962 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.120376 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.133100 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6ffb0836-d978-4f53-9a48-1174b647eeaf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.143410 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lf2z\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-kube-api-access-7lf2z\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.154500 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.155795 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.159951 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.160209 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.160461 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jlv62" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.160533 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.160626 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.160715 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.161061 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.161383 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.165302 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.204937 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.315337 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/13520585-08f1-45f7-b40d-d53b9f047cfd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.315383 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.315419 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.315446 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.315471 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.315550 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/13520585-08f1-45f7-b40d-d53b9f047cfd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.315592 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.315611 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.315633 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" 
(UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.315675 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.315793 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7sjs\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-kube-api-access-j7sjs\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.418421 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/13520585-08f1-45f7-b40d-d53b9f047cfd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.418903 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.418937 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.419001 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.419036 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.419067 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7sjs\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-kube-api-access-j7sjs\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.419116 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/13520585-08f1-45f7-b40d-d53b9f047cfd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc 
kubenswrapper[4916]: I1203 19:47:19.419312 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.420644 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.421682 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.423503 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.424010 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/13520585-08f1-45f7-b40d-d53b9f047cfd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.424210 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.424628 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.425421 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/13520585-08f1-45f7-b40d-d53b9f047cfd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.434813 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.434933 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.436022 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.434959 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.436652 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7sjs\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-kube-api-access-j7sjs\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.437906 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.438753 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.447491 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:19 crc kubenswrapper[4916]: I1203 19:47:19.520720 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.313748 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.315092 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.319168 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.319433 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-664x8" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.320545 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.324179 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.324900 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.328142 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.447921 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85db28fe-52b4-4feb-8461-8c7a7e6e5179-operator-scripts\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.448368 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/85db28fe-52b4-4feb-8461-8c7a7e6e5179-config-data-default\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.448400 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85db28fe-52b4-4feb-8461-8c7a7e6e5179-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.448504 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/85db28fe-52b4-4feb-8461-8c7a7e6e5179-config-data-generated\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.448553 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/85db28fe-52b4-4feb-8461-8c7a7e6e5179-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.448840 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.449116 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-pwt4k\" (UniqueName: \"kubernetes.io/projected/85db28fe-52b4-4feb-8461-8c7a7e6e5179-kube-api-access-pwt4k\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.449414 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85db28fe-52b4-4feb-8461-8c7a7e6e5179-kolla-config\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.557683 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.557761 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwt4k\" (UniqueName: \"kubernetes.io/projected/85db28fe-52b4-4feb-8461-8c7a7e6e5179-kube-api-access-pwt4k\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.557805 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85db28fe-52b4-4feb-8461-8c7a7e6e5179-kolla-config\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.557874 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85db28fe-52b4-4feb-8461-8c7a7e6e5179-operator-scripts\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.557900 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/85db28fe-52b4-4feb-8461-8c7a7e6e5179-config-data-default\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.557917 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85db28fe-52b4-4feb-8461-8c7a7e6e5179-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.557981 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/85db28fe-52b4-4feb-8461-8c7a7e6e5179-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.558001 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/85db28fe-52b4-4feb-8461-8c7a7e6e5179-config-data-generated\") pod \"openstack-galera-0\" (UID: 
\"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.558587 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/85db28fe-52b4-4feb-8461-8c7a7e6e5179-config-data-generated\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.558254 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.559466 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/85db28fe-52b4-4feb-8461-8c7a7e6e5179-config-data-default\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.560366 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85db28fe-52b4-4feb-8461-8c7a7e6e5179-operator-scripts\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.560764 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85db28fe-52b4-4feb-8461-8c7a7e6e5179-kolla-config\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.562226 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85db28fe-52b4-4feb-8461-8c7a7e6e5179-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.566817 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/85db28fe-52b4-4feb-8461-8c7a7e6e5179-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.592195 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.597078 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwt4k\" (UniqueName: \"kubernetes.io/projected/85db28fe-52b4-4feb-8461-8c7a7e6e5179-kube-api-access-pwt4k\") pod \"openstack-galera-0\" (UID: \"85db28fe-52b4-4feb-8461-8c7a7e6e5179\") " pod="openstack/openstack-galera-0" Dec 03 19:47:20 crc kubenswrapper[4916]: I1203 19:47:20.640386 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.799779 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.800924 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.802870 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.803120 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-nlfsk" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.803385 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.803509 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.809823 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.981159 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjqzs\" (UniqueName: \"kubernetes.io/projected/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-kube-api-access-mjqzs\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.981260 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.981349 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.981495 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.981546 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.981597 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-galera-tls-certs\") pod 
\"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.981689 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:21 crc kubenswrapper[4916]: I1203 19:47:21.981744 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.083080 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjqzs\" (UniqueName: \"kubernetes.io/projected/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-kube-api-access-mjqzs\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.083394 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.083440 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.083469 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.083502 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.083551 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.083587 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: 
\"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.083611 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.083704 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.084243 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.085244 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.085511 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.086090 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.089366 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.089523 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.114775 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjqzs\" (UniqueName: \"kubernetes.io/projected/d8b94f14-6cc4-4c21-969c-e1aeb3c199fe-kube-api-access-mjqzs\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc 
kubenswrapper[4916]: I1203 19:47:22.116694 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe\") " pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.150169 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.153830 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.159331 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.159526 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-gbg27" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.160015 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.170935 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.184052 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.288445 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.288573 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-kolla-config\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.288699 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b98b\" (UniqueName: \"kubernetes.io/projected/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-kube-api-access-5b98b\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.288731 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-config-data\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.288784 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.390607 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-kolla-config\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.390684 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b98b\" (UniqueName: \"kubernetes.io/projected/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-kube-api-access-5b98b\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.390712 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-config-data\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.390730 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.390760 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.391357 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-kolla-config\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.391778 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-config-data\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.396099 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.396154 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.432285 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b98b\" (UniqueName: \"kubernetes.io/projected/431d6c6e-0ec4-4eae-8bee-4fdce5e2328d-kube-api-access-5b98b\") pod \"memcached-0\" (UID: \"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d\") " pod="openstack/memcached-0" Dec 03 19:47:22 crc kubenswrapper[4916]: I1203 19:47:22.476222 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 03 19:47:24 crc kubenswrapper[4916]: I1203 19:47:24.434316 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 19:47:24 crc kubenswrapper[4916]: I1203 19:47:24.436316 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 19:47:24 crc kubenswrapper[4916]: I1203 19:47:24.437931 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-v7kr9" Dec 03 19:47:24 crc kubenswrapper[4916]: I1203 19:47:24.441658 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 19:47:24 crc kubenswrapper[4916]: I1203 19:47:24.627270 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg8t2\" (UniqueName: \"kubernetes.io/projected/bd19dc41-00ef-46d1-ad30-4b9486db33ee-kube-api-access-cg8t2\") pod \"kube-state-metrics-0\" (UID: \"bd19dc41-00ef-46d1-ad30-4b9486db33ee\") " pod="openstack/kube-state-metrics-0" Dec 03 19:47:24 crc kubenswrapper[4916]: I1203 19:47:24.729372 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg8t2\" (UniqueName: \"kubernetes.io/projected/bd19dc41-00ef-46d1-ad30-4b9486db33ee-kube-api-access-cg8t2\") pod \"kube-state-metrics-0\" (UID: \"bd19dc41-00ef-46d1-ad30-4b9486db33ee\") " pod="openstack/kube-state-metrics-0" Dec 03 19:47:24 crc kubenswrapper[4916]: I1203 19:47:24.760510 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg8t2\" (UniqueName: \"kubernetes.io/projected/bd19dc41-00ef-46d1-ad30-4b9486db33ee-kube-api-access-cg8t2\") pod \"kube-state-metrics-0\" (UID: \"bd19dc41-00ef-46d1-ad30-4b9486db33ee\") " pod="openstack/kube-state-metrics-0" Dec 03 19:47:25 crc kubenswrapper[4916]: I1203 19:47:25.060281 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.624685 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-stq8b"] Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.626241 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.632334 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.632646 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-jqs6v" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.632848 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.652749 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-7wkt5"] Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.654860 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.678178 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-stq8b"] Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.683358 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-7wkt5"] Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.787939 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7cb5f017-c41b-4af3-8455-e1ab42faa626-var-log-ovn\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.787981 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj8w6\" (UniqueName: \"kubernetes.io/projected/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-kube-api-access-rj8w6\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.788018 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-scripts\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.788039 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-var-run\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.788055 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7cb5f017-c41b-4af3-8455-e1ab42faa626-var-run\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.788078 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cb5f017-c41b-4af3-8455-e1ab42faa626-ovn-controller-tls-certs\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.788094 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cb5f017-c41b-4af3-8455-e1ab42faa626-combined-ca-bundle\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.788121 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7cb5f017-c41b-4af3-8455-e1ab42faa626-var-run-ovn\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc 
kubenswrapper[4916]: I1203 19:47:27.788138 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp8gx\" (UniqueName: \"kubernetes.io/projected/7cb5f017-c41b-4af3-8455-e1ab42faa626-kube-api-access-rp8gx\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.788160 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-etc-ovs\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.788179 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-var-lib\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.788248 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7cb5f017-c41b-4af3-8455-e1ab42faa626-scripts\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.788291 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-var-log\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.889283 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7cb5f017-c41b-4af3-8455-e1ab42faa626-var-log-ovn\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.889823 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj8w6\" (UniqueName: \"kubernetes.io/projected/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-kube-api-access-rj8w6\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.889881 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-scripts\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.889901 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7cb5f017-c41b-4af3-8455-e1ab42faa626-var-run\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.889915 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"var-run\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-var-run\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.889945 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cb5f017-c41b-4af3-8455-e1ab42faa626-ovn-controller-tls-certs\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.889972 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cb5f017-c41b-4af3-8455-e1ab42faa626-combined-ca-bundle\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890004 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7cb5f017-c41b-4af3-8455-e1ab42faa626-var-run-ovn\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890023 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp8gx\" (UniqueName: \"kubernetes.io/projected/7cb5f017-c41b-4af3-8455-e1ab42faa626-kube-api-access-rp8gx\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890055 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-etc-ovs\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890077 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-var-lib\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890112 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7cb5f017-c41b-4af3-8455-e1ab42faa626-scripts\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890140 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-var-log\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890333 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-var-log\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " 
pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.889788 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7cb5f017-c41b-4af3-8455-e1ab42faa626-var-log-ovn\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890415 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-var-run\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890489 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7cb5f017-c41b-4af3-8455-e1ab42faa626-var-run-ovn\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890642 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7cb5f017-c41b-4af3-8455-e1ab42faa626-var-run\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890664 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-var-lib\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.890873 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-etc-ovs\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.892374 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7cb5f017-c41b-4af3-8455-e1ab42faa626-scripts\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.895735 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-scripts\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.899066 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cb5f017-c41b-4af3-8455-e1ab42faa626-combined-ca-bundle\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.899457 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/7cb5f017-c41b-4af3-8455-e1ab42faa626-ovn-controller-tls-certs\") pod 
\"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.907354 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp8gx\" (UniqueName: \"kubernetes.io/projected/7cb5f017-c41b-4af3-8455-e1ab42faa626-kube-api-access-rp8gx\") pod \"ovn-controller-stq8b\" (UID: \"7cb5f017-c41b-4af3-8455-e1ab42faa626\") " pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.908153 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj8w6\" (UniqueName: \"kubernetes.io/projected/4180ae6f-d0a4-4af0-b89c-48ab118b3f8c-kube-api-access-rj8w6\") pod \"ovn-controller-ovs-7wkt5\" (UID: \"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c\") " pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.951281 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-stq8b" Dec 03 19:47:27 crc kubenswrapper[4916]: I1203 19:47:27.972511 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.062931 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.064485 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.074311 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.076900 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.076900 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.077156 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-9h9xn" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.077166 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.084927 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.203587 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.203675 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87458b34-0f3f-430d-8c93-a3138854fc20-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.203696 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/87458b34-0f3f-430d-8c93-a3138854fc20-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.203727 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/87458b34-0f3f-430d-8c93-a3138854fc20-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.203746 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87458b34-0f3f-430d-8c93-a3138854fc20-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.203767 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87458b34-0f3f-430d-8c93-a3138854fc20-config\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.203786 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/87458b34-0f3f-430d-8c93-a3138854fc20-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.203809 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbjjg\" (UniqueName: \"kubernetes.io/projected/87458b34-0f3f-430d-8c93-a3138854fc20-kube-api-access-zbjjg\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.305670 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87458b34-0f3f-430d-8c93-a3138854fc20-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.305720 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87458b34-0f3f-430d-8c93-a3138854fc20-config\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.305742 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/87458b34-0f3f-430d-8c93-a3138854fc20-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.305770 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbjjg\" (UniqueName: \"kubernetes.io/projected/87458b34-0f3f-430d-8c93-a3138854fc20-kube-api-access-zbjjg\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") 
" pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.305831 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.305870 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87458b34-0f3f-430d-8c93-a3138854fc20-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.305886 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87458b34-0f3f-430d-8c93-a3138854fc20-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.305912 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/87458b34-0f3f-430d-8c93-a3138854fc20-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.306765 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.306978 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87458b34-0f3f-430d-8c93-a3138854fc20-config\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.308738 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87458b34-0f3f-430d-8c93-a3138854fc20-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.309641 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/87458b34-0f3f-430d-8c93-a3138854fc20-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.312582 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87458b34-0f3f-430d-8c93-a3138854fc20-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.317327 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87458b34-0f3f-430d-8c93-a3138854fc20-combined-ca-bundle\") 
pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.321983 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/87458b34-0f3f-430d-8c93-a3138854fc20-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.322388 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbjjg\" (UniqueName: \"kubernetes.io/projected/87458b34-0f3f-430d-8c93-a3138854fc20-kube-api-access-zbjjg\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.340178 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87458b34-0f3f-430d-8c93-a3138854fc20\") " pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:28 crc kubenswrapper[4916]: I1203 19:47:28.393652 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.570601 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.576811 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.581160 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-gqcwv" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.581348 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.581773 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.581803 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.585510 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.760433 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.760473 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.760539 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-config\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.760560 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.760751 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.760831 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.760859 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xs5v\" (UniqueName: \"kubernetes.io/projected/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-kube-api-access-6xs5v\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.760885 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.863461 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.863715 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.863771 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.864053 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") device mount path \"/mnt/openstack/pv04\"" 
pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.864184 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-config\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.864267 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.864425 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.864479 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.864516 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xs5v\" (UniqueName: \"kubernetes.io/projected/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-kube-api-access-6xs5v\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.865179 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.865600 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-config\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.866397 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.870013 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.871005 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: 
\"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.872063 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.899751 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xs5v\" (UniqueName: \"kubernetes.io/projected/8e2bf00a-bdbe-4c59-b020-b1c3d96375f3-kube-api-access-6xs5v\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.916241 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3\") " pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:31 crc kubenswrapper[4916]: I1203 19:47:31.916309 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 19:47:32 crc kubenswrapper[4916]: I1203 19:47:32.214200 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:32 crc kubenswrapper[4916]: E1203 19:47:32.373876 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 19:47:32 crc kubenswrapper[4916]: E1203 19:47:32.374062 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7hhnv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-ckgmb_openstack(249477db-bbd7-4c96-a0b7-e36c3243598d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 19:47:32 crc kubenswrapper[4916]: E1203 19:47:32.375280 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb" podUID="249477db-bbd7-4c96-a0b7-e36c3243598d" Dec 03 19:47:32 crc kubenswrapper[4916]: W1203 19:47:32.387457 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod431d6c6e_0ec4_4eae_8bee_4fdce5e2328d.slice/crio-ce4650c7402275dbcfd0d81208e1e51fa82de71c3e473fc0452522df01ee67b4 WatchSource:0}: Error finding container ce4650c7402275dbcfd0d81208e1e51fa82de71c3e473fc0452522df01ee67b4: Status 404 returned error can't find the container with id ce4650c7402275dbcfd0d81208e1e51fa82de71c3e473fc0452522df01ee67b4 Dec 03 19:47:32 crc kubenswrapper[4916]: E1203 19:47:32.390774 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 19:47:32 crc kubenswrapper[4916]: E1203 19:47:32.390966 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv 
--bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zv27p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-k9wvr_openstack(685fd815-e94b-4130-a1cd-c566bb975525): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 19:47:32 crc kubenswrapper[4916]: E1203 19:47:32.392151 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr" podUID="685fd815-e94b-4130-a1cd-c566bb975525" Dec 03 19:47:32 crc kubenswrapper[4916]: I1203 19:47:32.852786 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 19:47:32 crc kubenswrapper[4916]: I1203 19:47:32.868487 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 19:47:32 crc kubenswrapper[4916]: W1203 19:47:32.976254 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6ffb0836_d978_4f53_9a48_1174b647eeaf.slice/crio-ff49beee11b39bed7b082045ed64a6a2ff6f7003edeed115db24730a278f6d62 WatchSource:0}: Error finding container ff49beee11b39bed7b082045ed64a6a2ff6f7003edeed115db24730a278f6d62: Status 404 returned error can't find the container with id ff49beee11b39bed7b082045ed64a6a2ff6f7003edeed115db24730a278f6d62 Dec 03 19:47:32 crc kubenswrapper[4916]: W1203 19:47:32.978199 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85db28fe_52b4_4feb_8461_8c7a7e6e5179.slice/crio-2d5c120274efa64b65007f9824310930e56c3ee4e4083c11db2f397871dfa613 WatchSource:0}: Error finding container 2d5c120274efa64b65007f9824310930e56c3ee4e4083c11db2f397871dfa613: Status 404 returned error can't find the container with id 2d5c120274efa64b65007f9824310930e56c3ee4e4083c11db2f397871dfa613 Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 
19:47:33.008241 4916 generic.go:334] "Generic (PLEG): container finished" podID="5b4a627a-cfa7-4e96-9563-5e859ad5525a" containerID="23a7034862058092867ea27f17beafab46471d1c3abd0dd8c266c065a983f8ad" exitCode=0 Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.008313 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-r22qg" event={"ID":"5b4a627a-cfa7-4e96-9563-5e859ad5525a","Type":"ContainerDied","Data":"23a7034862058092867ea27f17beafab46471d1c3abd0dd8c266c065a983f8ad"} Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.013279 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6ffb0836-d978-4f53-9a48-1174b647eeaf","Type":"ContainerStarted","Data":"ff49beee11b39bed7b082045ed64a6a2ff6f7003edeed115db24730a278f6d62"} Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.014729 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d","Type":"ContainerStarted","Data":"ce4650c7402275dbcfd0d81208e1e51fa82de71c3e473fc0452522df01ee67b4"} Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.017550 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"85db28fe-52b4-4feb-8461-8c7a7e6e5179","Type":"ContainerStarted","Data":"2d5c120274efa64b65007f9824310930e56c3ee4e4083c11db2f397871dfa613"} Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.022007 4916 generic.go:334] "Generic (PLEG): container finished" podID="b5c9236a-7a8f-4fd9-9831-2370c10ab466" containerID="ab77c5f73f1892c1ac31529996582b34da241d73afb4e6a475483e0a93e7afc3" exitCode=0 Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.022905 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp" event={"ID":"b5c9236a-7a8f-4fd9-9831-2370c10ab466","Type":"ContainerDied","Data":"ab77c5f73f1892c1ac31529996582b34da241d73afb4e6a475483e0a93e7afc3"} Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.040788 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.058415 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-stq8b"] Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.113637 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.125640 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 19:47:33 crc kubenswrapper[4916]: W1203 19:47:33.139653 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8b94f14_6cc4_4c21_969c_e1aeb3c199fe.slice/crio-67a6122e40d6fbe8ba93b4285cc401c352e0941e052c434195a32df1b56fd944 WatchSource:0}: Error finding container 67a6122e40d6fbe8ba93b4285cc401c352e0941e052c434195a32df1b56fd944: Status 404 returned error can't find the container with id 67a6122e40d6fbe8ba93b4285cc401c352e0941e052c434195a32df1b56fd944 Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.234662 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 19:47:33 crc kubenswrapper[4916]: W1203 19:47:33.235332 4916 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87458b34_0f3f_430d_8c93_a3138854fc20.slice/crio-341202eaf49250409b2013a544b55e1ab5f01fddc27a80935524bfe69b562007 WatchSource:0}: Error finding container 341202eaf49250409b2013a544b55e1ab5f01fddc27a80935524bfe69b562007: Status 404 returned error can't find the container with id 341202eaf49250409b2013a544b55e1ab5f01fddc27a80935524bfe69b562007 Dec 03 19:47:33 crc kubenswrapper[4916]: E1203 19:47:33.300776 4916 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 03 19:47:33 crc kubenswrapper[4916]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/5b4a627a-cfa7-4e96-9563-5e859ad5525a/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 03 19:47:33 crc kubenswrapper[4916]: > podSandboxID="d27f369993a95d9e0dd8ec480a831bad9c576ce9e5c340226d6f1b000a4beb3f" Dec 03 19:47:33 crc kubenswrapper[4916]: E1203 19:47:33.301007 4916 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 03 19:47:33 crc kubenswrapper[4916]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d6vr8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-r22qg_openstack(5b4a627a-cfa7-4e96-9563-5e859ad5525a): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/5b4a627a-cfa7-4e96-9563-5e859ad5525a/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 03 19:47:33 crc kubenswrapper[4916]: > logger="UnhandledError" Dec 03 19:47:33 crc kubenswrapper[4916]: E1203 19:47:33.302326 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/5b4a627a-cfa7-4e96-9563-5e859ad5525a/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-666b6646f7-r22qg" podUID="5b4a627a-cfa7-4e96-9563-5e859ad5525a" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.343260 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-7wkt5"] Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.434632 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 19:47:33 crc kubenswrapper[4916]: W1203 19:47:33.439297 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e2bf00a_bdbe_4c59_b020_b1c3d96375f3.slice/crio-a2aa0302c6b9991261d8a6aeaf29575d1e31920722402b593903d07fd34f407e WatchSource:0}: Error finding container a2aa0302c6b9991261d8a6aeaf29575d1e31920722402b593903d07fd34f407e: Status 404 returned error can't find the container with id a2aa0302c6b9991261d8a6aeaf29575d1e31920722402b593903d07fd34f407e Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.439627 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.562998 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.608788 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-dns-svc\") pod \"249477db-bbd7-4c96-a0b7-e36c3243598d\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.608949 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-config\") pod \"249477db-bbd7-4c96-a0b7-e36c3243598d\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.609023 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hhnv\" (UniqueName: \"kubernetes.io/projected/249477db-bbd7-4c96-a0b7-e36c3243598d-kube-api-access-7hhnv\") pod \"249477db-bbd7-4c96-a0b7-e36c3243598d\" (UID: \"249477db-bbd7-4c96-a0b7-e36c3243598d\") " Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.609414 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "249477db-bbd7-4c96-a0b7-e36c3243598d" (UID: "249477db-bbd7-4c96-a0b7-e36c3243598d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.609421 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-config" (OuterVolumeSpecName: "config") pod "249477db-bbd7-4c96-a0b7-e36c3243598d" (UID: "249477db-bbd7-4c96-a0b7-e36c3243598d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.609861 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.610023 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/249477db-bbd7-4c96-a0b7-e36c3243598d-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.613496 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/249477db-bbd7-4c96-a0b7-e36c3243598d-kube-api-access-7hhnv" (OuterVolumeSpecName: "kube-api-access-7hhnv") pod "249477db-bbd7-4c96-a0b7-e36c3243598d" (UID: "249477db-bbd7-4c96-a0b7-e36c3243598d"). InnerVolumeSpecName "kube-api-access-7hhnv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.710755 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/685fd815-e94b-4130-a1cd-c566bb975525-config\") pod \"685fd815-e94b-4130-a1cd-c566bb975525\" (UID: \"685fd815-e94b-4130-a1cd-c566bb975525\") " Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.710906 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zv27p\" (UniqueName: \"kubernetes.io/projected/685fd815-e94b-4130-a1cd-c566bb975525-kube-api-access-zv27p\") pod \"685fd815-e94b-4130-a1cd-c566bb975525\" (UID: \"685fd815-e94b-4130-a1cd-c566bb975525\") " Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.711203 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hhnv\" (UniqueName: \"kubernetes.io/projected/249477db-bbd7-4c96-a0b7-e36c3243598d-kube-api-access-7hhnv\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.711665 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/685fd815-e94b-4130-a1cd-c566bb975525-config" (OuterVolumeSpecName: "config") pod "685fd815-e94b-4130-a1cd-c566bb975525" (UID: "685fd815-e94b-4130-a1cd-c566bb975525"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.714755 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/685fd815-e94b-4130-a1cd-c566bb975525-kube-api-access-zv27p" (OuterVolumeSpecName: "kube-api-access-zv27p") pod "685fd815-e94b-4130-a1cd-c566bb975525" (UID: "685fd815-e94b-4130-a1cd-c566bb975525"). InnerVolumeSpecName "kube-api-access-zv27p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.812648 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/685fd815-e94b-4130-a1cd-c566bb975525-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:33 crc kubenswrapper[4916]: I1203 19:47:33.812676 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zv27p\" (UniqueName: \"kubernetes.io/projected/685fd815-e94b-4130-a1cd-c566bb975525-kube-api-access-zv27p\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.030373 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-stq8b" event={"ID":"7cb5f017-c41b-4af3-8455-e1ab42faa626","Type":"ContainerStarted","Data":"f789582934974c3d789c39a39ef754b7d1062b04519cc600fb608ffd6e08ed66"} Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.032274 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bd19dc41-00ef-46d1-ad30-4b9486db33ee","Type":"ContainerStarted","Data":"128c20e6f1b80c185546a7bec9caa838fb0bf250327285fdc66bfe22b7525523"} Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.033384 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3","Type":"ContainerStarted","Data":"a2aa0302c6b9991261d8a6aeaf29575d1e31920722402b593903d07fd34f407e"} Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.034340 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb" Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.034332 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-ckgmb" event={"ID":"249477db-bbd7-4c96-a0b7-e36c3243598d","Type":"ContainerDied","Data":"b1408623074e0dba56dafcb65e92c875028fb31bc03746c439effe029627d6c4"} Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.036108 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp" event={"ID":"b5c9236a-7a8f-4fd9-9831-2370c10ab466","Type":"ContainerStarted","Data":"e821694afe0367a55cc673ca169d1f451afdfea15bc962d94b7f10d3a084c861"} Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.036198 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp" Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.037289 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe","Type":"ContainerStarted","Data":"67a6122e40d6fbe8ba93b4285cc401c352e0941e052c434195a32df1b56fd944"} Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.039205 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7wkt5" event={"ID":"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c","Type":"ContainerStarted","Data":"5d6b23e7b8735f1689f95108817f40412a42ee8f907e17465399137386d68330"} Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.040510 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"13520585-08f1-45f7-b40d-d53b9f047cfd","Type":"ContainerStarted","Data":"f43c4f8d8a0ee9385c3c8c3b6b0a03f6a9c38db83c860835f0a6751ecebe4191"} Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.041470 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87458b34-0f3f-430d-8c93-a3138854fc20","Type":"ContainerStarted","Data":"341202eaf49250409b2013a544b55e1ab5f01fddc27a80935524bfe69b562007"} Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.043176 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr" Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.044705 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-k9wvr" event={"ID":"685fd815-e94b-4130-a1cd-c566bb975525","Type":"ContainerDied","Data":"2990c186fc364d1f6971e369616a020851b3dc0036a9b86c71a494e4fc42a525"} Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.053016 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp" podStartSLOduration=3.363230496 podStartE2EDuration="17.053001941s" podCreationTimestamp="2025-12-03 19:47:17 +0000 UTC" firstStartedPulling="2025-12-03 19:47:18.810844876 +0000 UTC m=+1054.773655132" lastFinishedPulling="2025-12-03 19:47:32.500616301 +0000 UTC m=+1068.463426577" observedRunningTime="2025-12-03 19:47:34.051045218 +0000 UTC m=+1070.013855484" watchObservedRunningTime="2025-12-03 19:47:34.053001941 +0000 UTC m=+1070.015812207" Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.104687 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k9wvr"] Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.113660 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k9wvr"] Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.141808 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ckgmb"] Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.147054 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ckgmb"] Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.494365 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="249477db-bbd7-4c96-a0b7-e36c3243598d" path="/var/lib/kubelet/pods/249477db-bbd7-4c96-a0b7-e36c3243598d/volumes" Dec 03 19:47:34 crc kubenswrapper[4916]: I1203 19:47:34.495373 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="685fd815-e94b-4130-a1cd-c566bb975525" path="/var/lib/kubelet/pods/685fd815-e94b-4130-a1cd-c566bb975525/volumes" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.678196 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-ndv46"] Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.679849 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.685550 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.686199 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ndv46"] Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.834011 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-f7sfp"] Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.855402 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c79e8746-0571-48ab-ad7d-94b92eadc07e-config\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.855506 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/c79e8746-0571-48ab-ad7d-94b92eadc07e-ovs-rundir\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.855537 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c79e8746-0571-48ab-ad7d-94b92eadc07e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.855584 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/c79e8746-0571-48ab-ad7d-94b92eadc07e-ovn-rundir\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.855607 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c79e8746-0571-48ab-ad7d-94b92eadc07e-combined-ca-bundle\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.855626 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtjd4\" (UniqueName: \"kubernetes.io/projected/c79e8746-0571-48ab-ad7d-94b92eadc07e-kube-api-access-dtjd4\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.862288 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-7lxt2"] Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.863650 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.866693 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.880237 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-7lxt2"] Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.959461 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/c79e8746-0571-48ab-ad7d-94b92eadc07e-ovs-rundir\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.959527 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m89r8\" (UniqueName: \"kubernetes.io/projected/582ffc79-4923-4b65-a536-c2f757507a4f-kube-api-access-m89r8\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.959557 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c79e8746-0571-48ab-ad7d-94b92eadc07e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.959593 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/c79e8746-0571-48ab-ad7d-94b92eadc07e-ovn-rundir\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.959624 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c79e8746-0571-48ab-ad7d-94b92eadc07e-combined-ca-bundle\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.959662 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtjd4\" (UniqueName: \"kubernetes.io/projected/c79e8746-0571-48ab-ad7d-94b92eadc07e-kube-api-access-dtjd4\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.959700 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-config\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.959732 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " 
pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.959784 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.959810 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c79e8746-0571-48ab-ad7d-94b92eadc07e-config\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.962454 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c79e8746-0571-48ab-ad7d-94b92eadc07e-config\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.963207 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/c79e8746-0571-48ab-ad7d-94b92eadc07e-ovs-rundir\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.969586 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c79e8746-0571-48ab-ad7d-94b92eadc07e-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.969754 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/c79e8746-0571-48ab-ad7d-94b92eadc07e-ovn-rundir\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.969807 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c79e8746-0571-48ab-ad7d-94b92eadc07e-combined-ca-bundle\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:35 crc kubenswrapper[4916]: I1203 19:47:35.988550 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtjd4\" (UniqueName: \"kubernetes.io/projected/c79e8746-0571-48ab-ad7d-94b92eadc07e-kube-api-access-dtjd4\") pod \"ovn-controller-metrics-ndv46\" (UID: \"c79e8746-0571-48ab-ad7d-94b92eadc07e\") " pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.005217 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-ndv46" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.021535 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-r22qg"] Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.050431 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-fz592"] Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.051961 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.060307 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-fz592"] Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.062121 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.062189 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.062219 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.062274 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lj2l\" (UniqueName: \"kubernetes.io/projected/258215fe-726c-4a33-81f2-137aa706c6d1-kube-api-access-8lj2l\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.062304 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m89r8\" (UniqueName: \"kubernetes.io/projected/582ffc79-4923-4b65-a536-c2f757507a4f-kube-api-access-m89r8\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.062367 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-config\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.062404 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 
19:47:36.062425 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.062442 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-config\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.063120 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.064212 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.064745 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.067272 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-config\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.073960 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp" podUID="b5c9236a-7a8f-4fd9-9831-2370c10ab466" containerName="dnsmasq-dns" containerID="cri-o://e821694afe0367a55cc673ca169d1f451afdfea15bc962d94b7f10d3a084c861" gracePeriod=10 Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.095424 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m89r8\" (UniqueName: \"kubernetes.io/projected/582ffc79-4923-4b65-a536-c2f757507a4f-kube-api-access-m89r8\") pod \"dnsmasq-dns-7fd796d7df-7lxt2\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.163918 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.163969 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-config\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 
19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.164020 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.164051 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.164103 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lj2l\" (UniqueName: \"kubernetes.io/projected/258215fe-726c-4a33-81f2-137aa706c6d1-kube-api-access-8lj2l\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.164844 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.164865 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-config\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.165141 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.165344 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.180540 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lj2l\" (UniqueName: \"kubernetes.io/projected/258215fe-726c-4a33-81f2-137aa706c6d1-kube-api-access-8lj2l\") pod \"dnsmasq-dns-86db49b7ff-fz592\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") " pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.184046 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:36 crc kubenswrapper[4916]: I1203 19:47:36.387135 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:37 crc kubenswrapper[4916]: I1203 19:47:37.086620 4916 generic.go:334] "Generic (PLEG): container finished" podID="b5c9236a-7a8f-4fd9-9831-2370c10ab466" containerID="e821694afe0367a55cc673ca169d1f451afdfea15bc962d94b7f10d3a084c861" exitCode=0 Dec 03 19:47:37 crc kubenswrapper[4916]: I1203 19:47:37.086671 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp" event={"ID":"b5c9236a-7a8f-4fd9-9831-2370c10ab466","Type":"ContainerDied","Data":"e821694afe0367a55cc673ca169d1f451afdfea15bc962d94b7f10d3a084c861"} Dec 03 19:47:38 crc kubenswrapper[4916]: I1203 19:47:38.315510 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp" podUID="b5c9236a-7a8f-4fd9-9831-2370c10ab466" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.98:5353: connect: connection refused" Dec 03 19:47:41 crc kubenswrapper[4916]: I1203 19:47:41.446894 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp" Dec 03 19:47:41 crc kubenswrapper[4916]: I1203 19:47:41.552918 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-config\") pod \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " Dec 03 19:47:41 crc kubenswrapper[4916]: I1203 19:47:41.553017 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wkwm\" (UniqueName: \"kubernetes.io/projected/b5c9236a-7a8f-4fd9-9831-2370c10ab466-kube-api-access-8wkwm\") pod \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " Dec 03 19:47:41 crc kubenswrapper[4916]: I1203 19:47:41.553502 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-dns-svc\") pod \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\" (UID: \"b5c9236a-7a8f-4fd9-9831-2370c10ab466\") " Dec 03 19:47:41 crc kubenswrapper[4916]: I1203 19:47:41.556680 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5c9236a-7a8f-4fd9-9831-2370c10ab466-kube-api-access-8wkwm" (OuterVolumeSpecName: "kube-api-access-8wkwm") pod "b5c9236a-7a8f-4fd9-9831-2370c10ab466" (UID: "b5c9236a-7a8f-4fd9-9831-2370c10ab466"). InnerVolumeSpecName "kube-api-access-8wkwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:47:41 crc kubenswrapper[4916]: I1203 19:47:41.598511 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-config" (OuterVolumeSpecName: "config") pod "b5c9236a-7a8f-4fd9-9831-2370c10ab466" (UID: "b5c9236a-7a8f-4fd9-9831-2370c10ab466"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:47:41 crc kubenswrapper[4916]: I1203 19:47:41.603785 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b5c9236a-7a8f-4fd9-9831-2370c10ab466" (UID: "b5c9236a-7a8f-4fd9-9831-2370c10ab466"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:47:41 crc kubenswrapper[4916]: I1203 19:47:41.654646 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wkwm\" (UniqueName: \"kubernetes.io/projected/b5c9236a-7a8f-4fd9-9831-2370c10ab466-kube-api-access-8wkwm\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:41 crc kubenswrapper[4916]: I1203 19:47:41.654670 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:41 crc kubenswrapper[4916]: I1203 19:47:41.654681 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5c9236a-7a8f-4fd9-9831-2370c10ab466-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:41 crc kubenswrapper[4916]: I1203 19:47:41.878893 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ndv46"] Dec 03 19:47:41 crc kubenswrapper[4916]: W1203 19:47:41.953714 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc79e8746_0571_48ab_ad7d_94b92eadc07e.slice/crio-f4d25d7bf99d8245863ffa1c0b3de65ecd958465b27bb2d7ff199e362faa9d8d WatchSource:0}: Error finding container f4d25d7bf99d8245863ffa1c0b3de65ecd958465b27bb2d7ff199e362faa9d8d: Status 404 returned error can't find the container with id f4d25d7bf99d8245863ffa1c0b3de65ecd958465b27bb2d7ff199e362faa9d8d Dec 03 19:47:42 crc kubenswrapper[4916]: I1203 19:47:42.127829 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ndv46" event={"ID":"c79e8746-0571-48ab-ad7d-94b92eadc07e","Type":"ContainerStarted","Data":"f4d25d7bf99d8245863ffa1c0b3de65ecd958465b27bb2d7ff199e362faa9d8d"} Dec 03 19:47:42 crc kubenswrapper[4916]: I1203 19:47:42.130078 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp" event={"ID":"b5c9236a-7a8f-4fd9-9831-2370c10ab466","Type":"ContainerDied","Data":"7b497c9481b3d206b67e68b178e02aa0865cf22ba62a85dd68d1df695c23bea6"} Dec 03 19:47:42 crc kubenswrapper[4916]: I1203 19:47:42.130127 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-f7sfp" Dec 03 19:47:42 crc kubenswrapper[4916]: I1203 19:47:42.130137 4916 scope.go:117] "RemoveContainer" containerID="e821694afe0367a55cc673ca169d1f451afdfea15bc962d94b7f10d3a084c861" Dec 03 19:47:42 crc kubenswrapper[4916]: I1203 19:47:42.165277 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-f7sfp"] Dec 03 19:47:42 crc kubenswrapper[4916]: I1203 19:47:42.171268 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-f7sfp"] Dec 03 19:47:42 crc kubenswrapper[4916]: I1203 19:47:42.210495 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-fz592"] Dec 03 19:47:42 crc kubenswrapper[4916]: W1203 19:47:42.248610 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod258215fe_726c_4a33_81f2_137aa706c6d1.slice/crio-cda3c70801c022ac0e6090f0da717985cfc599d5d442d69a95187e619fec8a91 WatchSource:0}: Error finding container cda3c70801c022ac0e6090f0da717985cfc599d5d442d69a95187e619fec8a91: Status 404 returned error can't find the container with id cda3c70801c022ac0e6090f0da717985cfc599d5d442d69a95187e619fec8a91 Dec 03 19:47:42 crc kubenswrapper[4916]: I1203 19:47:42.300058 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-7lxt2"] Dec 03 19:47:42 crc kubenswrapper[4916]: I1203 19:47:42.385022 4916 scope.go:117] "RemoveContainer" containerID="ab77c5f73f1892c1ac31529996582b34da241d73afb4e6a475483e0a93e7afc3" Dec 03 19:47:42 crc kubenswrapper[4916]: I1203 19:47:42.490842 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5c9236a-7a8f-4fd9-9831-2370c10ab466" path="/var/lib/kubelet/pods/b5c9236a-7a8f-4fd9-9831-2370c10ab466/volumes" Dec 03 19:47:42 crc kubenswrapper[4916]: W1203 19:47:42.495045 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod582ffc79_4923_4b65_a536_c2f757507a4f.slice/crio-1667e9bc4b32616a57ccb79ef0b58ff3d5f65d15b2bdb82d5bf3e0da64835a5f WatchSource:0}: Error finding container 1667e9bc4b32616a57ccb79ef0b58ff3d5f65d15b2bdb82d5bf3e0da64835a5f: Status 404 returned error can't find the container with id 1667e9bc4b32616a57ccb79ef0b58ff3d5f65d15b2bdb82d5bf3e0da64835a5f Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.138234 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe","Type":"ContainerStarted","Data":"d6844ca5bf656699425ac7cfe7f578b9228448bda1d7a71f8ccdcce3b0f76810"} Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.141680 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-stq8b" event={"ID":"7cb5f017-c41b-4af3-8455-e1ab42faa626","Type":"ContainerStarted","Data":"38a6ca14a6edb3540aa6ab665d44d5a330363f693c2fcc7eaa739e009d778379"} Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.141832 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-stq8b" Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.143978 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" event={"ID":"582ffc79-4923-4b65-a536-c2f757507a4f","Type":"ContainerStarted","Data":"1667e9bc4b32616a57ccb79ef0b58ff3d5f65d15b2bdb82d5bf3e0da64835a5f"} Dec 03 19:47:43 crc 
kubenswrapper[4916]: I1203 19:47:43.145666 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"431d6c6e-0ec4-4eae-8bee-4fdce5e2328d","Type":"ContainerStarted","Data":"0737d16b7df1988086d5a15d2248ff223e86c0a00304c73a0b0640a3e108d86c"} Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.146307 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.148364 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3","Type":"ContainerStarted","Data":"d5419e8f7249938b15991a7eeebdfd90085deab8d40c32cff1a0900ab7613ecb"} Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.149506 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-fz592" event={"ID":"258215fe-726c-4a33-81f2-137aa706c6d1","Type":"ContainerStarted","Data":"cda3c70801c022ac0e6090f0da717985cfc599d5d442d69a95187e619fec8a91"} Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.152230 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-r22qg" event={"ID":"5b4a627a-cfa7-4e96-9563-5e859ad5525a","Type":"ContainerStarted","Data":"40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef"} Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.152342 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-r22qg" podUID="5b4a627a-cfa7-4e96-9563-5e859ad5525a" containerName="dnsmasq-dns" containerID="cri-o://40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef" gracePeriod=10 Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.152412 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-r22qg" Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.200130 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-stq8b" podStartSLOduration=8.251051784 podStartE2EDuration="16.200111557s" podCreationTimestamp="2025-12-03 19:47:27 +0000 UTC" firstStartedPulling="2025-12-03 19:47:33.150157223 +0000 UTC m=+1069.112967489" lastFinishedPulling="2025-12-03 19:47:41.099216996 +0000 UTC m=+1077.062027262" observedRunningTime="2025-12-03 19:47:43.174583107 +0000 UTC m=+1079.137393383" watchObservedRunningTime="2025-12-03 19:47:43.200111557 +0000 UTC m=+1079.162921823" Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.210282 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=12.81093468 podStartE2EDuration="21.210253297s" podCreationTimestamp="2025-12-03 19:47:22 +0000 UTC" firstStartedPulling="2025-12-03 19:47:32.416916722 +0000 UTC m=+1068.379726998" lastFinishedPulling="2025-12-03 19:47:40.816235329 +0000 UTC m=+1076.779045615" observedRunningTime="2025-12-03 19:47:43.197506708 +0000 UTC m=+1079.160316984" watchObservedRunningTime="2025-12-03 19:47:43.210253297 +0000 UTC m=+1079.173063563" Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.217787 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-r22qg" podStartSLOduration=12.332820293 podStartE2EDuration="26.217771937s" podCreationTimestamp="2025-12-03 19:47:17 +0000 UTC" firstStartedPulling="2025-12-03 19:47:18.652421506 +0000 UTC m=+1054.615231772" lastFinishedPulling="2025-12-03 
19:47:32.53737315 +0000 UTC m=+1068.500183416" observedRunningTime="2025-12-03 19:47:43.217329096 +0000 UTC m=+1079.180139362" watchObservedRunningTime="2025-12-03 19:47:43.217771937 +0000 UTC m=+1079.180582203" Dec 03 19:47:43 crc kubenswrapper[4916]: I1203 19:47:43.862231 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-r22qg" Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.008006 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-config\") pod \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.008242 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6vr8\" (UniqueName: \"kubernetes.io/projected/5b4a627a-cfa7-4e96-9563-5e859ad5525a-kube-api-access-d6vr8\") pod \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.008308 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-dns-svc\") pod \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\" (UID: \"5b4a627a-cfa7-4e96-9563-5e859ad5525a\") " Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.097174 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b4a627a-cfa7-4e96-9563-5e859ad5525a-kube-api-access-d6vr8" (OuterVolumeSpecName: "kube-api-access-d6vr8") pod "5b4a627a-cfa7-4e96-9563-5e859ad5525a" (UID: "5b4a627a-cfa7-4e96-9563-5e859ad5525a"). InnerVolumeSpecName "kube-api-access-d6vr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.110340 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6vr8\" (UniqueName: \"kubernetes.io/projected/5b4a627a-cfa7-4e96-9563-5e859ad5525a-kube-api-access-d6vr8\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.167439 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87458b34-0f3f-430d-8c93-a3138854fc20","Type":"ContainerStarted","Data":"6e0ce747a741e49c5dd8479fec93e81d87014e3687837db6709db15ad5089ca9"} Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.171262 4916 generic.go:334] "Generic (PLEG): container finished" podID="5b4a627a-cfa7-4e96-9563-5e859ad5525a" containerID="40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef" exitCode=0 Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.171312 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-r22qg" Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.171343 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-r22qg" event={"ID":"5b4a627a-cfa7-4e96-9563-5e859ad5525a","Type":"ContainerDied","Data":"40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef"} Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.171389 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-r22qg" event={"ID":"5b4a627a-cfa7-4e96-9563-5e859ad5525a","Type":"ContainerDied","Data":"d27f369993a95d9e0dd8ec480a831bad9c576ce9e5c340226d6f1b000a4beb3f"} Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.171426 4916 scope.go:117] "RemoveContainer" containerID="40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef" Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.175616 4916 generic.go:334] "Generic (PLEG): container finished" podID="582ffc79-4923-4b65-a536-c2f757507a4f" containerID="4bf450e40963d45863a6ad289d5a2655ec631e7d7cd69f70afa7b9123330b3ca" exitCode=0 Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.175671 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" event={"ID":"582ffc79-4923-4b65-a536-c2f757507a4f","Type":"ContainerDied","Data":"4bf450e40963d45863a6ad289d5a2655ec631e7d7cd69f70afa7b9123330b3ca"} Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.178066 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"85db28fe-52b4-4feb-8461-8c7a7e6e5179","Type":"ContainerStarted","Data":"c7fbc3016577d447c728bca1789db7a3051e053f2d62f3cd930cf8afbe6de2e1"} Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.181930 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bd19dc41-00ef-46d1-ad30-4b9486db33ee","Type":"ContainerStarted","Data":"cdc92955a39188c448b97a8a07238252d9dfc1a3177ffe84ac938662a67ae7be"} Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.182061 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.183923 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7wkt5" event={"ID":"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c","Type":"ContainerStarted","Data":"fd55963198860bf48ec3d6c792ebaaba548633f8c81237fd3f6804e2da035769"} Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.186099 4916 generic.go:334] "Generic (PLEG): container finished" podID="258215fe-726c-4a33-81f2-137aa706c6d1" containerID="e932f3fdb6bee5b8d7e07b7652fa967b23186406edf46cba451207d8900c18d5" exitCode=0 Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.186285 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-fz592" event={"ID":"258215fe-726c-4a33-81f2-137aa706c6d1","Type":"ContainerDied","Data":"e932f3fdb6bee5b8d7e07b7652fa967b23186406edf46cba451207d8900c18d5"} Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.188480 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"13520585-08f1-45f7-b40d-d53b9f047cfd","Type":"ContainerStarted","Data":"4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a"} Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.220321 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5b4a627a-cfa7-4e96-9563-5e859ad5525a" (UID: "5b4a627a-cfa7-4e96-9563-5e859ad5525a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.224003 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-config" (OuterVolumeSpecName: "config") pod "5b4a627a-cfa7-4e96-9563-5e859ad5525a" (UID: "5b4a627a-cfa7-4e96-9563-5e859ad5525a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.275805 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=10.027198823 podStartE2EDuration="20.275785128s" podCreationTimestamp="2025-12-03 19:47:24 +0000 UTC" firstStartedPulling="2025-12-03 19:47:33.150343228 +0000 UTC m=+1069.113153494" lastFinishedPulling="2025-12-03 19:47:43.398929533 +0000 UTC m=+1079.361739799" observedRunningTime="2025-12-03 19:47:44.271442993 +0000 UTC m=+1080.234253259" watchObservedRunningTime="2025-12-03 19:47:44.275785128 +0000 UTC m=+1080.238595394" Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.314341 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.314371 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b4a627a-cfa7-4e96-9563-5e859ad5525a-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.517312 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-r22qg"] Dec 03 19:47:44 crc kubenswrapper[4916]: I1203 19:47:44.525249 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-r22qg"] Dec 03 19:47:45 crc kubenswrapper[4916]: I1203 19:47:45.197696 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6ffb0836-d978-4f53-9a48-1174b647eeaf","Type":"ContainerStarted","Data":"e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7"} Dec 03 19:47:45 crc kubenswrapper[4916]: I1203 19:47:45.202019 4916 generic.go:334] "Generic (PLEG): container finished" podID="4180ae6f-d0a4-4af0-b89c-48ab118b3f8c" containerID="fd55963198860bf48ec3d6c792ebaaba548633f8c81237fd3f6804e2da035769" exitCode=0 Dec 03 19:47:45 crc kubenswrapper[4916]: I1203 19:47:45.202160 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7wkt5" event={"ID":"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c","Type":"ContainerDied","Data":"fd55963198860bf48ec3d6c792ebaaba548633f8c81237fd3f6804e2da035769"} Dec 03 19:47:46 crc kubenswrapper[4916]: I1203 19:47:46.158996 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:47:46 crc kubenswrapper[4916]: I1203 19:47:46.159955 4916 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:47:46 crc kubenswrapper[4916]: I1203 19:47:46.494770 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b4a627a-cfa7-4e96-9563-5e859ad5525a" path="/var/lib/kubelet/pods/5b4a627a-cfa7-4e96-9563-5e859ad5525a/volumes" Dec 03 19:47:47 crc kubenswrapper[4916]: I1203 19:47:47.478391 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 03 19:47:49 crc kubenswrapper[4916]: I1203 19:47:49.240979 4916 generic.go:334] "Generic (PLEG): container finished" podID="d8b94f14-6cc4-4c21-969c-e1aeb3c199fe" containerID="d6844ca5bf656699425ac7cfe7f578b9228448bda1d7a71f8ccdcce3b0f76810" exitCode=0 Dec 03 19:47:49 crc kubenswrapper[4916]: I1203 19:47:49.241062 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe","Type":"ContainerDied","Data":"d6844ca5bf656699425ac7cfe7f578b9228448bda1d7a71f8ccdcce3b0f76810"} Dec 03 19:47:49 crc kubenswrapper[4916]: I1203 19:47:49.604490 4916 scope.go:117] "RemoveContainer" containerID="23a7034862058092867ea27f17beafab46471d1c3abd0dd8c266c065a983f8ad" Dec 03 19:47:49 crc kubenswrapper[4916]: I1203 19:47:49.709142 4916 scope.go:117] "RemoveContainer" containerID="40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef" Dec 03 19:47:49 crc kubenswrapper[4916]: E1203 19:47:49.709476 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef\": container with ID starting with 40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef not found: ID does not exist" containerID="40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef" Dec 03 19:47:49 crc kubenswrapper[4916]: I1203 19:47:49.709558 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef"} err="failed to get container status \"40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef\": rpc error: code = NotFound desc = could not find container \"40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef\": container with ID starting with 40628662e9ad8c3cb64e38ca1dfa339d29adf75c3ed0834224adc8344f83d6ef not found: ID does not exist" Dec 03 19:47:49 crc kubenswrapper[4916]: I1203 19:47:49.709694 4916 scope.go:117] "RemoveContainer" containerID="23a7034862058092867ea27f17beafab46471d1c3abd0dd8c266c065a983f8ad" Dec 03 19:47:49 crc kubenswrapper[4916]: E1203 19:47:49.710173 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23a7034862058092867ea27f17beafab46471d1c3abd0dd8c266c065a983f8ad\": container with ID starting with 23a7034862058092867ea27f17beafab46471d1c3abd0dd8c266c065a983f8ad not found: ID does not exist" containerID="23a7034862058092867ea27f17beafab46471d1c3abd0dd8c266c065a983f8ad" Dec 03 19:47:49 crc kubenswrapper[4916]: I1203 19:47:49.710200 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23a7034862058092867ea27f17beafab46471d1c3abd0dd8c266c065a983f8ad"} 
err="failed to get container status \"23a7034862058092867ea27f17beafab46471d1c3abd0dd8c266c065a983f8ad\": rpc error: code = NotFound desc = could not find container \"23a7034862058092867ea27f17beafab46471d1c3abd0dd8c266c065a983f8ad\": container with ID starting with 23a7034862058092867ea27f17beafab46471d1c3abd0dd8c266c065a983f8ad not found: ID does not exist" Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.255160 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7wkt5" event={"ID":"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c","Type":"ContainerStarted","Data":"12a21cea1c44a27bdf8a7aae6dc8472d5cdaacc929ffb6d0f22fe34d59d2cb17"} Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.257885 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87458b34-0f3f-430d-8c93-a3138854fc20","Type":"ContainerStarted","Data":"50b347cf1e8ff1b33237d24553111ef83bb40511a82bb613da05aea1576fa22e"} Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.262557 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" event={"ID":"582ffc79-4923-4b65-a536-c2f757507a4f","Type":"ContainerStarted","Data":"ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db"} Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.262702 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.265349 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"8e2bf00a-bdbe-4c59-b020-b1c3d96375f3","Type":"ContainerStarted","Data":"f050d8ec53db6356b697f828452667b809c94f1b25cebf85517eff480f239f96"} Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.269785 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d8b94f14-6cc4-4c21-969c-e1aeb3c199fe","Type":"ContainerStarted","Data":"553a0d516eb599b5131239b6f6ee075dc3a024b14421712d02b05dd6dd296c12"} Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.272997 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-fz592" event={"ID":"258215fe-726c-4a33-81f2-137aa706c6d1","Type":"ContainerStarted","Data":"7935614971ec1d015d8ef50139fe8d47252fe8fade922a779724bf2b0b418875"} Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.273349 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.278708 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ndv46" event={"ID":"c79e8746-0571-48ab-ad7d-94b92eadc07e","Type":"ContainerStarted","Data":"202fc5b4e2ba3cc09531389fb54e8a670e1cec4450f06e5efe34a5cd0374d145"} Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.282426 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=6.737025765 podStartE2EDuration="23.282410884s" podCreationTimestamp="2025-12-03 19:47:27 +0000 UTC" firstStartedPulling="2025-12-03 19:47:33.239993475 +0000 UTC m=+1069.202803731" lastFinishedPulling="2025-12-03 19:47:49.785378574 +0000 UTC m=+1085.748188850" observedRunningTime="2025-12-03 19:47:50.279914877 +0000 UTC m=+1086.242725143" watchObservedRunningTime="2025-12-03 19:47:50.282410884 +0000 UTC m=+1086.245221150" Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 
19:47:50.308381 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" podStartSLOduration=15.308359755 podStartE2EDuration="15.308359755s" podCreationTimestamp="2025-12-03 19:47:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:47:50.307941184 +0000 UTC m=+1086.270751450" watchObservedRunningTime="2025-12-03 19:47:50.308359755 +0000 UTC m=+1086.271170021" Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.350155 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-fz592" podStartSLOduration=14.350136488 podStartE2EDuration="14.350136488s" podCreationTimestamp="2025-12-03 19:47:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:47:50.348122304 +0000 UTC m=+1086.310932570" watchObservedRunningTime="2025-12-03 19:47:50.350136488 +0000 UTC m=+1086.312946754" Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.350774 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.041047602 podStartE2EDuration="20.350770484s" podCreationTimestamp="2025-12-03 19:47:30 +0000 UTC" firstStartedPulling="2025-12-03 19:47:33.442206071 +0000 UTC m=+1069.405016337" lastFinishedPulling="2025-12-03 19:47:49.751928953 +0000 UTC m=+1085.714739219" observedRunningTime="2025-12-03 19:47:50.32996312 +0000 UTC m=+1086.292773406" watchObservedRunningTime="2025-12-03 19:47:50.350770484 +0000 UTC m=+1086.313580750" Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.380767 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-ndv46" podStartSLOduration=7.605281922 podStartE2EDuration="15.380729502s" podCreationTimestamp="2025-12-03 19:47:35 +0000 UTC" firstStartedPulling="2025-12-03 19:47:41.957459457 +0000 UTC m=+1077.920269723" lastFinishedPulling="2025-12-03 19:47:49.732907027 +0000 UTC m=+1085.695717303" observedRunningTime="2025-12-03 19:47:50.367895791 +0000 UTC m=+1086.330706057" watchObservedRunningTime="2025-12-03 19:47:50.380729502 +0000 UTC m=+1086.343539768" Dec 03 19:47:50 crc kubenswrapper[4916]: I1203 19:47:50.400690 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=22.117771977 podStartE2EDuration="30.400666513s" podCreationTimestamp="2025-12-03 19:47:20 +0000 UTC" firstStartedPulling="2025-12-03 19:47:33.149840054 +0000 UTC m=+1069.112650320" lastFinishedPulling="2025-12-03 19:47:41.43273458 +0000 UTC m=+1077.395544856" observedRunningTime="2025-12-03 19:47:50.396340468 +0000 UTC m=+1086.359150744" watchObservedRunningTime="2025-12-03 19:47:50.400666513 +0000 UTC m=+1086.363476779" Dec 03 19:47:51 crc kubenswrapper[4916]: I1203 19:47:51.290795 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7wkt5" event={"ID":"4180ae6f-d0a4-4af0-b89c-48ab118b3f8c","Type":"ContainerStarted","Data":"75164cc468c70040ae68f3d47e934103b40428761942737125f4d4a4914042a4"} Dec 03 19:47:51 crc kubenswrapper[4916]: I1203 19:47:51.327192 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-7wkt5" podStartSLOduration=16.265489907 podStartE2EDuration="24.327168041s" podCreationTimestamp="2025-12-03 
19:47:27 +0000 UTC" firstStartedPulling="2025-12-03 19:47:33.370047239 +0000 UTC m=+1069.332857505" lastFinishedPulling="2025-12-03 19:47:41.431725373 +0000 UTC m=+1077.394535639" observedRunningTime="2025-12-03 19:47:51.315455989 +0000 UTC m=+1087.278266295" watchObservedRunningTime="2025-12-03 19:47:51.327168041 +0000 UTC m=+1087.289978347" Dec 03 19:47:52 crc kubenswrapper[4916]: I1203 19:47:52.185071 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:52 crc kubenswrapper[4916]: I1203 19:47:52.185450 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:52 crc kubenswrapper[4916]: I1203 19:47:52.214349 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:52 crc kubenswrapper[4916]: I1203 19:47:52.307322 4916 generic.go:334] "Generic (PLEG): container finished" podID="85db28fe-52b4-4feb-8461-8c7a7e6e5179" containerID="c7fbc3016577d447c728bca1789db7a3051e053f2d62f3cd930cf8afbe6de2e1" exitCode=0 Dec 03 19:47:52 crc kubenswrapper[4916]: I1203 19:47:52.307447 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"85db28fe-52b4-4feb-8461-8c7a7e6e5179","Type":"ContainerDied","Data":"c7fbc3016577d447c728bca1789db7a3051e053f2d62f3cd930cf8afbe6de2e1"} Dec 03 19:47:52 crc kubenswrapper[4916]: I1203 19:47:52.307944 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:52 crc kubenswrapper[4916]: I1203 19:47:52.307968 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:47:52 crc kubenswrapper[4916]: I1203 19:47:52.394876 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:52 crc kubenswrapper[4916]: I1203 19:47:52.445481 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.214708 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.279746 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.321780 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"85db28fe-52b4-4feb-8461-8c7a7e6e5179","Type":"ContainerStarted","Data":"63027b3df83ef39e703e9b8bc86ef6b87b21c201490bbcf33da58055478e30b0"} Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.322338 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.343168 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=25.567980432 podStartE2EDuration="34.34314661s" podCreationTimestamp="2025-12-03 19:47:19 +0000 UTC" firstStartedPulling="2025-12-03 19:47:32.990323955 +0000 UTC m=+1068.953134221" lastFinishedPulling="2025-12-03 19:47:41.765490133 +0000 UTC m=+1077.728300399" observedRunningTime="2025-12-03 19:47:53.342187185 +0000 UTC m=+1089.304997471" watchObservedRunningTime="2025-12-03 19:47:53.34314661 +0000 UTC m=+1089.305956886" Dec 03 19:47:53 
crc kubenswrapper[4916]: I1203 19:47:53.387659 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.393804 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.779706 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 03 19:47:53 crc kubenswrapper[4916]: E1203 19:47:53.780053 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b4a627a-cfa7-4e96-9563-5e859ad5525a" containerName="dnsmasq-dns" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.780068 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b4a627a-cfa7-4e96-9563-5e859ad5525a" containerName="dnsmasq-dns" Dec 03 19:47:53 crc kubenswrapper[4916]: E1203 19:47:53.780084 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5c9236a-7a8f-4fd9-9831-2370c10ab466" containerName="dnsmasq-dns" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.780092 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5c9236a-7a8f-4fd9-9831-2370c10ab466" containerName="dnsmasq-dns" Dec 03 19:47:53 crc kubenswrapper[4916]: E1203 19:47:53.780111 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5c9236a-7a8f-4fd9-9831-2370c10ab466" containerName="init" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.780118 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5c9236a-7a8f-4fd9-9831-2370c10ab466" containerName="init" Dec 03 19:47:53 crc kubenswrapper[4916]: E1203 19:47:53.780138 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b4a627a-cfa7-4e96-9563-5e859ad5525a" containerName="init" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.780146 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b4a627a-cfa7-4e96-9563-5e859ad5525a" containerName="init" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.780326 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5c9236a-7a8f-4fd9-9831-2370c10ab466" containerName="dnsmasq-dns" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.780340 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b4a627a-cfa7-4e96-9563-5e859ad5525a" containerName="dnsmasq-dns" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.781443 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.784619 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-jxxrf" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.784917 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.785116 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.787877 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.833397 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.877030 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ec7f91f3-02c9-42a9-b415-aa58806d9b17-scripts\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.877082 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7cjg\" (UniqueName: \"kubernetes.io/projected/ec7f91f3-02c9-42a9-b415-aa58806d9b17-kube-api-access-g7cjg\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.877107 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec7f91f3-02c9-42a9-b415-aa58806d9b17-config\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.877141 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec7f91f3-02c9-42a9-b415-aa58806d9b17-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.877452 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec7f91f3-02c9-42a9-b415-aa58806d9b17-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.877541 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ec7f91f3-02c9-42a9-b415-aa58806d9b17-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.877691 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7f91f3-02c9-42a9-b415-aa58806d9b17-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: 
I1203 19:47:53.979481 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7cjg\" (UniqueName: \"kubernetes.io/projected/ec7f91f3-02c9-42a9-b415-aa58806d9b17-kube-api-access-g7cjg\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.979533 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec7f91f3-02c9-42a9-b415-aa58806d9b17-config\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.979591 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec7f91f3-02c9-42a9-b415-aa58806d9b17-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.979667 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec7f91f3-02c9-42a9-b415-aa58806d9b17-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.979705 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ec7f91f3-02c9-42a9-b415-aa58806d9b17-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.979745 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7f91f3-02c9-42a9-b415-aa58806d9b17-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.979776 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ec7f91f3-02c9-42a9-b415-aa58806d9b17-scripts\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.980366 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ec7f91f3-02c9-42a9-b415-aa58806d9b17-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.980897 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ec7f91f3-02c9-42a9-b415-aa58806d9b17-scripts\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.980904 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec7f91f3-02c9-42a9-b415-aa58806d9b17-config\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.986163 4916 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec7f91f3-02c9-42a9-b415-aa58806d9b17-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.986216 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec7f91f3-02c9-42a9-b415-aa58806d9b17-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:53 crc kubenswrapper[4916]: I1203 19:47:53.993409 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec7f91f3-02c9-42a9-b415-aa58806d9b17-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.012666 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7cjg\" (UniqueName: \"kubernetes.io/projected/ec7f91f3-02c9-42a9-b415-aa58806d9b17-kube-api-access-g7cjg\") pod \"ovn-northd-0\" (UID: \"ec7f91f3-02c9-42a9-b415-aa58806d9b17\") " pod="openstack/ovn-northd-0" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.111287 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.586045 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.664436 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.813127 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-7lxt2"] Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.813357 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" podUID="582ffc79-4923-4b65-a536-c2f757507a4f" containerName="dnsmasq-dns" containerID="cri-o://ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db" gracePeriod=10 Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.814687 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.858094 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-cx5z4"] Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.859467 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.875303 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-cx5z4"] Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.892291 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtl6h\" (UniqueName: \"kubernetes.io/projected/75d64faf-6ddb-49fc-8eda-03cc5c10233e-kube-api-access-wtl6h\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.892426 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.892455 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-dns-svc\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.892496 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-config\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.892539 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.906477 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.994238 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-config\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.994287 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.994362 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtl6h\" (UniqueName: \"kubernetes.io/projected/75d64faf-6ddb-49fc-8eda-03cc5c10233e-kube-api-access-wtl6h\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") 
" pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.994427 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.994461 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-dns-svc\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.995317 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.995341 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-dns-svc\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.995883 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:54 crc kubenswrapper[4916]: I1203 19:47:54.996277 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-config\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.019424 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtl6h\" (UniqueName: \"kubernetes.io/projected/75d64faf-6ddb-49fc-8eda-03cc5c10233e-kube-api-access-wtl6h\") pod \"dnsmasq-dns-698758b865-cx5z4\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.066005 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.211034 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.325441 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.355136 4916 generic.go:334] "Generic (PLEG): container finished" podID="582ffc79-4923-4b65-a536-c2f757507a4f" containerID="ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db" exitCode=0 Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.355224 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" event={"ID":"582ffc79-4923-4b65-a536-c2f757507a4f","Type":"ContainerDied","Data":"ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db"} Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.355253 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" event={"ID":"582ffc79-4923-4b65-a536-c2f757507a4f","Type":"ContainerDied","Data":"1667e9bc4b32616a57ccb79ef0b58ff3d5f65d15b2bdb82d5bf3e0da64835a5f"} Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.355269 4916 scope.go:117] "RemoveContainer" containerID="ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.355397 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-7lxt2" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.360618 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"ec7f91f3-02c9-42a9-b415-aa58806d9b17","Type":"ContainerStarted","Data":"06fef1c8d49fc24f7047f8e5be8042feedfe7db53f55848feca86d3a080b8e6e"} Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.404754 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-config\") pod \"582ffc79-4923-4b65-a536-c2f757507a4f\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.405499 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-ovsdbserver-nb\") pod \"582ffc79-4923-4b65-a536-c2f757507a4f\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.405639 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-dns-svc\") pod \"582ffc79-4923-4b65-a536-c2f757507a4f\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.405664 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m89r8\" (UniqueName: \"kubernetes.io/projected/582ffc79-4923-4b65-a536-c2f757507a4f-kube-api-access-m89r8\") pod \"582ffc79-4923-4b65-a536-c2f757507a4f\" (UID: \"582ffc79-4923-4b65-a536-c2f757507a4f\") " Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.411942 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/582ffc79-4923-4b65-a536-c2f757507a4f-kube-api-access-m89r8" (OuterVolumeSpecName: "kube-api-access-m89r8") pod "582ffc79-4923-4b65-a536-c2f757507a4f" (UID: "582ffc79-4923-4b65-a536-c2f757507a4f"). InnerVolumeSpecName "kube-api-access-m89r8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.418116 4916 scope.go:117] "RemoveContainer" containerID="4bf450e40963d45863a6ad289d5a2655ec631e7d7cd69f70afa7b9123330b3ca" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.473784 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-config" (OuterVolumeSpecName: "config") pod "582ffc79-4923-4b65-a536-c2f757507a4f" (UID: "582ffc79-4923-4b65-a536-c2f757507a4f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.476705 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "582ffc79-4923-4b65-a536-c2f757507a4f" (UID: "582ffc79-4923-4b65-a536-c2f757507a4f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.482160 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "582ffc79-4923-4b65-a536-c2f757507a4f" (UID: "582ffc79-4923-4b65-a536-c2f757507a4f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.483054 4916 scope.go:117] "RemoveContainer" containerID="ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db" Dec 03 19:47:55 crc kubenswrapper[4916]: E1203 19:47:55.483492 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db\": container with ID starting with ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db not found: ID does not exist" containerID="ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.483527 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db"} err="failed to get container status \"ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db\": rpc error: code = NotFound desc = could not find container \"ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db\": container with ID starting with ceeab060f02b838910d42f503d4dc521d97f3806eeb8978547b94258d4a417db not found: ID does not exist" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.483552 4916 scope.go:117] "RemoveContainer" containerID="4bf450e40963d45863a6ad289d5a2655ec631e7d7cd69f70afa7b9123330b3ca" Dec 03 19:47:55 crc kubenswrapper[4916]: E1203 19:47:55.484008 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bf450e40963d45863a6ad289d5a2655ec631e7d7cd69f70afa7b9123330b3ca\": container with ID starting with 4bf450e40963d45863a6ad289d5a2655ec631e7d7cd69f70afa7b9123330b3ca not found: ID does not exist" containerID="4bf450e40963d45863a6ad289d5a2655ec631e7d7cd69f70afa7b9123330b3ca" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.484041 4916 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4bf450e40963d45863a6ad289d5a2655ec631e7d7cd69f70afa7b9123330b3ca"} err="failed to get container status \"4bf450e40963d45863a6ad289d5a2655ec631e7d7cd69f70afa7b9123330b3ca\": rpc error: code = NotFound desc = could not find container \"4bf450e40963d45863a6ad289d5a2655ec631e7d7cd69f70afa7b9123330b3ca\": container with ID starting with 4bf450e40963d45863a6ad289d5a2655ec631e7d7cd69f70afa7b9123330b3ca not found: ID does not exist" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.515078 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.515128 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.515145 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/582ffc79-4923-4b65-a536-c2f757507a4f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.515158 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m89r8\" (UniqueName: \"kubernetes.io/projected/582ffc79-4923-4b65-a536-c2f757507a4f-kube-api-access-m89r8\") on node \"crc\" DevicePath \"\"" Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.692888 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-7lxt2"] Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.700326 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-7lxt2"] Dec 03 19:47:55 crc kubenswrapper[4916]: I1203 19:47:55.708397 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-cx5z4"] Dec 03 19:47:55 crc kubenswrapper[4916]: W1203 19:47:55.871498 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75d64faf_6ddb_49fc_8eda_03cc5c10233e.slice/crio-b40f84560cd10635e85f2c66411e958b8548a0498e28164248acd3f06a909682 WatchSource:0}: Error finding container b40f84560cd10635e85f2c66411e958b8548a0498e28164248acd3f06a909682: Status 404 returned error can't find the container with id b40f84560cd10635e85f2c66411e958b8548a0498e28164248acd3f06a909682 Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.008635 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 03 19:47:56 crc kubenswrapper[4916]: E1203 19:47:56.009206 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="582ffc79-4923-4b65-a536-c2f757507a4f" containerName="dnsmasq-dns" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.009225 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="582ffc79-4923-4b65-a536-c2f757507a4f" containerName="dnsmasq-dns" Dec 03 19:47:56 crc kubenswrapper[4916]: E1203 19:47:56.009235 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="582ffc79-4923-4b65-a536-c2f757507a4f" containerName="init" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.009241 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="582ffc79-4923-4b65-a536-c2f757507a4f" containerName="init" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.009862 4916 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="582ffc79-4923-4b65-a536-c2f757507a4f" containerName="dnsmasq-dns" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.015638 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.023495 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.024532 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.024832 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-tfv8l" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.024959 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.034955 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.129371 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-lock\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.129463 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.129558 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.129737 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95w9l\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-kube-api-access-95w9l\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.129896 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-cache\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.232680 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-lock\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.232749 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: 
\"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.232810 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.232854 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95w9l\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-kube-api-access-95w9l\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.232987 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-cache\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: E1203 19:47:56.233226 4916 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.233336 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-lock\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: E1203 19:47:56.233348 4916 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 19:47:56 crc kubenswrapper[4916]: E1203 19:47:56.233443 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift podName:bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5 nodeName:}" failed. No retries permitted until 2025-12-03 19:47:56.733421606 +0000 UTC m=+1092.696231952 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift") pod "swift-storage-0" (UID: "bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5") : configmap "swift-ring-files" not found Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.233841 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-cache\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.233949 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.257419 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95w9l\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-kube-api-access-95w9l\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.270392 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.367252 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-cx5z4" event={"ID":"75d64faf-6ddb-49fc-8eda-03cc5c10233e","Type":"ContainerStarted","Data":"b40f84560cd10635e85f2c66411e958b8548a0498e28164248acd3f06a909682"} Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.389521 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-fz592" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.489267 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="582ffc79-4923-4b65-a536-c2f757507a4f" path="/var/lib/kubelet/pods/582ffc79-4923-4b65-a536-c2f757507a4f/volumes" Dec 03 19:47:56 crc kubenswrapper[4916]: I1203 19:47:56.741973 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:56 crc kubenswrapper[4916]: E1203 19:47:56.742233 4916 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 19:47:56 crc kubenswrapper[4916]: E1203 19:47:56.742273 4916 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 19:47:56 crc kubenswrapper[4916]: E1203 19:47:56.742360 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift podName:bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5 nodeName:}" failed. No retries permitted until 2025-12-03 19:47:57.742331852 +0000 UTC m=+1093.705142158 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift") pod "swift-storage-0" (UID: "bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5") : configmap "swift-ring-files" not found Dec 03 19:47:57 crc kubenswrapper[4916]: I1203 19:47:57.379555 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"ec7f91f3-02c9-42a9-b415-aa58806d9b17","Type":"ContainerStarted","Data":"b627b93afa610a42bf2bb43abc33087d5f0baab3d55b7fec7fb0ec5e48d12ce5"} Dec 03 19:47:57 crc kubenswrapper[4916]: I1203 19:47:57.379681 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"ec7f91f3-02c9-42a9-b415-aa58806d9b17","Type":"ContainerStarted","Data":"a939e70646be1e450d519a97b586e60176a6bd0c6c2fd3a8f9f876076c504366"} Dec 03 19:47:57 crc kubenswrapper[4916]: I1203 19:47:57.379797 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 03 19:47:57 crc kubenswrapper[4916]: I1203 19:47:57.381161 4916 generic.go:334] "Generic (PLEG): container finished" podID="75d64faf-6ddb-49fc-8eda-03cc5c10233e" containerID="14f535fa5e6b674e4a9df31087f5b7fffa4c1f73814d7dff047acbf7fbd1f78a" exitCode=0 Dec 03 19:47:57 crc kubenswrapper[4916]: I1203 19:47:57.381198 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-cx5z4" event={"ID":"75d64faf-6ddb-49fc-8eda-03cc5c10233e","Type":"ContainerDied","Data":"14f535fa5e6b674e4a9df31087f5b7fffa4c1f73814d7dff047acbf7fbd1f78a"} Dec 03 19:47:57 crc kubenswrapper[4916]: I1203 19:47:57.409694 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.068534366 podStartE2EDuration="4.409671598s" podCreationTimestamp="2025-12-03 19:47:53 +0000 UTC" firstStartedPulling="2025-12-03 19:47:54.584708041 +0000 UTC m=+1090.547518327" lastFinishedPulling="2025-12-03 19:47:55.925845293 +0000 UTC m=+1091.888655559" observedRunningTime="2025-12-03 19:47:57.404299615 +0000 UTC m=+1093.367109881" watchObservedRunningTime="2025-12-03 19:47:57.409671598 +0000 UTC m=+1093.372481864" Dec 03 19:47:57 crc kubenswrapper[4916]: I1203 19:47:57.757508 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:57 crc kubenswrapper[4916]: E1203 19:47:57.757707 4916 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 19:47:57 crc kubenswrapper[4916]: E1203 19:47:57.757729 4916 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 19:47:57 crc kubenswrapper[4916]: E1203 19:47:57.757786 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift podName:bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5 nodeName:}" failed. No retries permitted until 2025-12-03 19:47:59.75776894 +0000 UTC m=+1095.720579216 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift") pod "swift-storage-0" (UID: "bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5") : configmap "swift-ring-files" not found Dec 03 19:47:58 crc kubenswrapper[4916]: I1203 19:47:58.393929 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-cx5z4" event={"ID":"75d64faf-6ddb-49fc-8eda-03cc5c10233e","Type":"ContainerStarted","Data":"1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a"} Dec 03 19:47:58 crc kubenswrapper[4916]: I1203 19:47:58.431473 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-cx5z4" podStartSLOduration=4.431442443 podStartE2EDuration="4.431442443s" podCreationTimestamp="2025-12-03 19:47:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:47:58.419458764 +0000 UTC m=+1094.382269130" watchObservedRunningTime="2025-12-03 19:47:58.431442443 +0000 UTC m=+1094.394252759" Dec 03 19:47:59 crc kubenswrapper[4916]: I1203 19:47:59.404266 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:47:59 crc kubenswrapper[4916]: I1203 19:47:59.798398 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:47:59 crc kubenswrapper[4916]: E1203 19:47:59.798700 4916 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 19:47:59 crc kubenswrapper[4916]: E1203 19:47:59.798762 4916 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 19:47:59 crc kubenswrapper[4916]: E1203 19:47:59.798869 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift podName:bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5 nodeName:}" failed. No retries permitted until 2025-12-03 19:48:03.798833436 +0000 UTC m=+1099.761643742 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift") pod "swift-storage-0" (UID: "bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5") : configmap "swift-ring-files" not found Dec 03 19:47:59 crc kubenswrapper[4916]: I1203 19:47:59.943353 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-9zsgq"] Dec 03 19:47:59 crc kubenswrapper[4916]: I1203 19:47:59.944940 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:47:59 crc kubenswrapper[4916]: I1203 19:47:59.947140 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 03 19:47:59 crc kubenswrapper[4916]: I1203 19:47:59.948908 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 03 19:47:59 crc kubenswrapper[4916]: I1203 19:47:59.949785 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 03 19:47:59 crc kubenswrapper[4916]: I1203 19:47:59.958834 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-9zsgq"] Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.001800 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-swiftconf\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.001842 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3c809170-1425-4dac-9f8f-7a4b395315e7-etc-swift\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.001908 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-ring-data-devices\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.001968 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-combined-ca-bundle\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.001996 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-scripts\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.002034 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h64gc\" (UniqueName: \"kubernetes.io/projected/3c809170-1425-4dac-9f8f-7a4b395315e7-kube-api-access-h64gc\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.002082 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-dispersionconf\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 
19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.005343 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-wfwfd"] Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.006468 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.026052 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-9zsgq"] Dec 03 19:48:00 crc kubenswrapper[4916]: E1203 19:48:00.026503 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-h64gc ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-9zsgq" podUID="3c809170-1425-4dac-9f8f-7a4b395315e7" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.037746 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-wfwfd"] Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.103759 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-ring-data-devices\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.103823 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/955b2a04-73e1-4ab5-b322-e301684e8785-etc-swift\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.103854 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-combined-ca-bundle\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.103875 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-ring-data-devices\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.103896 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-scripts\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.103926 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h64gc\" (UniqueName: \"kubernetes.io/projected/3c809170-1425-4dac-9f8f-7a4b395315e7-kube-api-access-h64gc\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.103987 4916 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-swiftconf\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.104018 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-combined-ca-bundle\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.104038 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-dispersionconf\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.104071 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcd57\" (UniqueName: \"kubernetes.io/projected/955b2a04-73e1-4ab5-b322-e301684e8785-kube-api-access-lcd57\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.104091 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-dispersionconf\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.104112 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-scripts\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.104128 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-swiftconf\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.104144 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3c809170-1425-4dac-9f8f-7a4b395315e7-etc-swift\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.104552 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3c809170-1425-4dac-9f8f-7a4b395315e7-etc-swift\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.105062 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-ring-data-devices\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.106862 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-scripts\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.111645 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-dispersionconf\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.114205 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-combined-ca-bundle\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.114256 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-swiftconf\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.126215 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h64gc\" (UniqueName: \"kubernetes.io/projected/3c809170-1425-4dac-9f8f-7a4b395315e7-kube-api-access-h64gc\") pod \"swift-ring-rebalance-9zsgq\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") " pod="openstack/swift-ring-rebalance-9zsgq" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.205237 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-combined-ca-bundle\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.205361 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcd57\" (UniqueName: \"kubernetes.io/projected/955b2a04-73e1-4ab5-b322-e301684e8785-kube-api-access-lcd57\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.205386 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-dispersionconf\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.205413 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-scripts\") pod 
\"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.205515 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/955b2a04-73e1-4ab5-b322-e301684e8785-etc-swift\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.205556 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-ring-data-devices\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.205644 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-swiftconf\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.206499 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/955b2a04-73e1-4ab5-b322-e301684e8785-etc-swift\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.206596 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-ring-data-devices\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.208420 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-scripts\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.212022 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-swiftconf\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.212983 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-combined-ca-bundle\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.213577 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-dispersionconf\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 
19:48:00.226446 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcd57\" (UniqueName: \"kubernetes.io/projected/955b2a04-73e1-4ab5-b322-e301684e8785-kube-api-access-lcd57\") pod \"swift-ring-rebalance-wfwfd\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " pod="openstack/swift-ring-rebalance-wfwfd"
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.326269 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-wfwfd"
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.414889 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-9zsgq"
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.448125 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-9zsgq"
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.509423 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-ring-data-devices\") pod \"3c809170-1425-4dac-9f8f-7a4b395315e7\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") "
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.509488 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-scripts\") pod \"3c809170-1425-4dac-9f8f-7a4b395315e7\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") "
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.509800 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-swiftconf\") pod \"3c809170-1425-4dac-9f8f-7a4b395315e7\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") "
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.509919 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-dispersionconf\") pod \"3c809170-1425-4dac-9f8f-7a4b395315e7\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") "
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.509967 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h64gc\" (UniqueName: \"kubernetes.io/projected/3c809170-1425-4dac-9f8f-7a4b395315e7-kube-api-access-h64gc\") pod \"3c809170-1425-4dac-9f8f-7a4b395315e7\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") "
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.510014 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-combined-ca-bundle\") pod \"3c809170-1425-4dac-9f8f-7a4b395315e7\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") "
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.510050 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3c809170-1425-4dac-9f8f-7a4b395315e7-etc-swift\") pod \"3c809170-1425-4dac-9f8f-7a4b395315e7\" (UID: \"3c809170-1425-4dac-9f8f-7a4b395315e7\") "
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.510167 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "3c809170-1425-4dac-9f8f-7a4b395315e7" (UID: "3c809170-1425-4dac-9f8f-7a4b395315e7"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.510384 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c809170-1425-4dac-9f8f-7a4b395315e7-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "3c809170-1425-4dac-9f8f-7a4b395315e7" (UID: "3c809170-1425-4dac-9f8f-7a4b395315e7"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.510489 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-scripts" (OuterVolumeSpecName: "scripts") pod "3c809170-1425-4dac-9f8f-7a4b395315e7" (UID: "3c809170-1425-4dac-9f8f-7a4b395315e7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.511237 4916 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3c809170-1425-4dac-9f8f-7a4b395315e7-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.511353 4916 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.511394 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3c809170-1425-4dac-9f8f-7a4b395315e7-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.515696 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "3c809170-1425-4dac-9f8f-7a4b395315e7" (UID: "3c809170-1425-4dac-9f8f-7a4b395315e7"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.515740 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c809170-1425-4dac-9f8f-7a4b395315e7-kube-api-access-h64gc" (OuterVolumeSpecName: "kube-api-access-h64gc") pod "3c809170-1425-4dac-9f8f-7a4b395315e7" (UID: "3c809170-1425-4dac-9f8f-7a4b395315e7"). InnerVolumeSpecName "kube-api-access-h64gc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.515904 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c809170-1425-4dac-9f8f-7a4b395315e7" (UID: "3c809170-1425-4dac-9f8f-7a4b395315e7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.516222 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "3c809170-1425-4dac-9f8f-7a4b395315e7" (UID: "3c809170-1425-4dac-9f8f-7a4b395315e7"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.590770 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-wfwfd"]
Dec 03 19:48:00 crc kubenswrapper[4916]: W1203 19:48:00.592807 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod955b2a04_73e1_4ab5_b322_e301684e8785.slice/crio-158f7d53c92c500a6292a07dace7b7b57444e86725dfc918c2823192e34ecab8 WatchSource:0}: Error finding container 158f7d53c92c500a6292a07dace7b7b57444e86725dfc918c2823192e34ecab8: Status 404 returned error can't find the container with id 158f7d53c92c500a6292a07dace7b7b57444e86725dfc918c2823192e34ecab8
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.612443 4916 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.612474 4916 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.612484 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h64gc\" (UniqueName: \"kubernetes.io/projected/3c809170-1425-4dac-9f8f-7a4b395315e7-kube-api-access-h64gc\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.612495 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c809170-1425-4dac-9f8f-7a4b395315e7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.640847 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.640938 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Dec 03 19:48:00 crc kubenswrapper[4916]: I1203 19:48:00.739253 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Dec 03 19:48:01 crc kubenswrapper[4916]: I1203 19:48:01.434552 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-wfwfd" event={"ID":"955b2a04-73e1-4ab5-b322-e301684e8785","Type":"ContainerStarted","Data":"158f7d53c92c500a6292a07dace7b7b57444e86725dfc918c2823192e34ecab8"}
Dec 03 19:48:01 crc kubenswrapper[4916]: I1203 19:48:01.434732 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-9zsgq"
Dec 03 19:48:01 crc kubenswrapper[4916]: I1203 19:48:01.490614 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-9zsgq"]
Dec 03 19:48:01 crc kubenswrapper[4916]: I1203 19:48:01.497887 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-9zsgq"]
Dec 03 19:48:01 crc kubenswrapper[4916]: I1203 19:48:01.522884 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.141593 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-e728-account-create-update-wgg8d"]
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.143122 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-e728-account-create-update-wgg8d"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.148513 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-e728-account-create-update-wgg8d"]
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.186595 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.231646 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-wszhq"]
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.232824 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-wszhq"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.257688 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-wszhq"]
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.286611 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c646c354-3c53-407c-ae77-4af980d70094-operator-scripts\") pod \"keystone-db-create-wszhq\" (UID: \"c646c354-3c53-407c-ae77-4af980d70094\") " pod="openstack/keystone-db-create-wszhq"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.286687 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jghh6\" (UniqueName: \"kubernetes.io/projected/c646c354-3c53-407c-ae77-4af980d70094-kube-api-access-jghh6\") pod \"keystone-db-create-wszhq\" (UID: \"c646c354-3c53-407c-ae77-4af980d70094\") " pod="openstack/keystone-db-create-wszhq"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.286741 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6291ea12-342b-49d8-aa3c-671573f55c06-operator-scripts\") pod \"keystone-e728-account-create-update-wgg8d\" (UID: \"6291ea12-342b-49d8-aa3c-671573f55c06\") " pod="openstack/keystone-e728-account-create-update-wgg8d"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.287422 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvh2r\" (UniqueName: \"kubernetes.io/projected/6291ea12-342b-49d8-aa3c-671573f55c06-kube-api-access-tvh2r\") pod \"keystone-e728-account-create-update-wgg8d\" (UID: \"6291ea12-342b-49d8-aa3c-671573f55c06\") " pod="openstack/keystone-e728-account-create-update-wgg8d"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.388790 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jghh6\" (UniqueName: \"kubernetes.io/projected/c646c354-3c53-407c-ae77-4af980d70094-kube-api-access-jghh6\") pod \"keystone-db-create-wszhq\" (UID: \"c646c354-3c53-407c-ae77-4af980d70094\") " pod="openstack/keystone-db-create-wszhq"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.388903 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6291ea12-342b-49d8-aa3c-671573f55c06-operator-scripts\") pod \"keystone-e728-account-create-update-wgg8d\" (UID: \"6291ea12-342b-49d8-aa3c-671573f55c06\") " pod="openstack/keystone-e728-account-create-update-wgg8d"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.389074 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvh2r\" (UniqueName: \"kubernetes.io/projected/6291ea12-342b-49d8-aa3c-671573f55c06-kube-api-access-tvh2r\") pod \"keystone-e728-account-create-update-wgg8d\" (UID: \"6291ea12-342b-49d8-aa3c-671573f55c06\") " pod="openstack/keystone-e728-account-create-update-wgg8d"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.389189 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c646c354-3c53-407c-ae77-4af980d70094-operator-scripts\") pod \"keystone-db-create-wszhq\" (UID: \"c646c354-3c53-407c-ae77-4af980d70094\") " pod="openstack/keystone-db-create-wszhq"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.390620 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c646c354-3c53-407c-ae77-4af980d70094-operator-scripts\") pod \"keystone-db-create-wszhq\" (UID: \"c646c354-3c53-407c-ae77-4af980d70094\") " pod="openstack/keystone-db-create-wszhq"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.392069 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6291ea12-342b-49d8-aa3c-671573f55c06-operator-scripts\") pod \"keystone-e728-account-create-update-wgg8d\" (UID: \"6291ea12-342b-49d8-aa3c-671573f55c06\") " pod="openstack/keystone-e728-account-create-update-wgg8d"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.408699 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvh2r\" (UniqueName: \"kubernetes.io/projected/6291ea12-342b-49d8-aa3c-671573f55c06-kube-api-access-tvh2r\") pod \"keystone-e728-account-create-update-wgg8d\" (UID: \"6291ea12-342b-49d8-aa3c-671573f55c06\") " pod="openstack/keystone-e728-account-create-update-wgg8d"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.408817 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jghh6\" (UniqueName: \"kubernetes.io/projected/c646c354-3c53-407c-ae77-4af980d70094-kube-api-access-jghh6\") pod \"keystone-db-create-wszhq\" (UID: \"c646c354-3c53-407c-ae77-4af980d70094\") " pod="openstack/keystone-db-create-wszhq"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.459480 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-q8p4t"]
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.461930 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-q8p4t"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.486388 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c809170-1425-4dac-9f8f-7a4b395315e7" path="/var/lib/kubelet/pods/3c809170-1425-4dac-9f8f-7a4b395315e7/volumes"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.486938 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-q8p4t"]
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.514538 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-e728-account-create-update-wgg8d"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.528179 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-3026-account-create-update-c96hn"]
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.529135 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-3026-account-create-update-c96hn"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.530991 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.541793 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-3026-account-create-update-c96hn"]
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.549461 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-wszhq"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.592729 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50419ef1-71cf-4f8a-a74d-48a708e15785-operator-scripts\") pod \"placement-db-create-q8p4t\" (UID: \"50419ef1-71cf-4f8a-a74d-48a708e15785\") " pod="openstack/placement-db-create-q8p4t"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.593084 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-operator-scripts\") pod \"placement-3026-account-create-update-c96hn\" (UID: \"eaaed8df-f339-4fc0-a76e-be13e78ef8fd\") " pod="openstack/placement-3026-account-create-update-c96hn"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.593158 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d59p\" (UniqueName: \"kubernetes.io/projected/50419ef1-71cf-4f8a-a74d-48a708e15785-kube-api-access-8d59p\") pod \"placement-db-create-q8p4t\" (UID: \"50419ef1-71cf-4f8a-a74d-48a708e15785\") " pod="openstack/placement-db-create-q8p4t"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.593243 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sl87m\" (UniqueName: \"kubernetes.io/projected/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-kube-api-access-sl87m\") pod \"placement-3026-account-create-update-c96hn\" (UID: \"eaaed8df-f339-4fc0-a76e-be13e78ef8fd\") " pod="openstack/placement-3026-account-create-update-c96hn"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.695039 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-operator-scripts\") pod \"placement-3026-account-create-update-c96hn\" (UID: \"eaaed8df-f339-4fc0-a76e-be13e78ef8fd\") " pod="openstack/placement-3026-account-create-update-c96hn"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.695522 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d59p\" (UniqueName: \"kubernetes.io/projected/50419ef1-71cf-4f8a-a74d-48a708e15785-kube-api-access-8d59p\") pod \"placement-db-create-q8p4t\" (UID: \"50419ef1-71cf-4f8a-a74d-48a708e15785\") " pod="openstack/placement-db-create-q8p4t"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.695792 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-operator-scripts\") pod \"placement-3026-account-create-update-c96hn\" (UID: \"eaaed8df-f339-4fc0-a76e-be13e78ef8fd\") " pod="openstack/placement-3026-account-create-update-c96hn"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.696001 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sl87m\" (UniqueName: \"kubernetes.io/projected/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-kube-api-access-sl87m\") pod \"placement-3026-account-create-update-c96hn\" (UID: \"eaaed8df-f339-4fc0-a76e-be13e78ef8fd\") " pod="openstack/placement-3026-account-create-update-c96hn"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.696265 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50419ef1-71cf-4f8a-a74d-48a708e15785-operator-scripts\") pod \"placement-db-create-q8p4t\" (UID: \"50419ef1-71cf-4f8a-a74d-48a708e15785\") " pod="openstack/placement-db-create-q8p4t"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.696991 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50419ef1-71cf-4f8a-a74d-48a708e15785-operator-scripts\") pod \"placement-db-create-q8p4t\" (UID: \"50419ef1-71cf-4f8a-a74d-48a708e15785\") " pod="openstack/placement-db-create-q8p4t"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.714477 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d59p\" (UniqueName: \"kubernetes.io/projected/50419ef1-71cf-4f8a-a74d-48a708e15785-kube-api-access-8d59p\") pod \"placement-db-create-q8p4t\" (UID: \"50419ef1-71cf-4f8a-a74d-48a708e15785\") " pod="openstack/placement-db-create-q8p4t"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.717704 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sl87m\" (UniqueName: \"kubernetes.io/projected/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-kube-api-access-sl87m\") pod \"placement-3026-account-create-update-c96hn\" (UID: \"eaaed8df-f339-4fc0-a76e-be13e78ef8fd\") " pod="openstack/placement-3026-account-create-update-c96hn"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.796259 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-q8p4t"
Dec 03 19:48:02 crc kubenswrapper[4916]: I1203 19:48:02.845253 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-3026-account-create-update-c96hn"
Dec 03 19:48:03 crc kubenswrapper[4916]: I1203 19:48:03.818547 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0"
Dec 03 19:48:03 crc kubenswrapper[4916]: E1203 19:48:03.819109 4916 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 03 19:48:03 crc kubenswrapper[4916]: E1203 19:48:03.819129 4916 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 03 19:48:03 crc kubenswrapper[4916]: E1203 19:48:03.819177 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift podName:bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5 nodeName:}" failed. No retries permitted until 2025-12-03 19:48:11.819161483 +0000 UTC m=+1107.781971759 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift") pod "swift-storage-0" (UID: "bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5") : configmap "swift-ring-files" not found
Dec 03 19:48:04 crc kubenswrapper[4916]: I1203 19:48:04.472396 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-wfwfd" event={"ID":"955b2a04-73e1-4ab5-b322-e301684e8785","Type":"ContainerStarted","Data":"af5da4767ff26770d81f3617197ba09627db299ba3d2cebcf6373ebe333ff9ae"}
Dec 03 19:48:04 crc kubenswrapper[4916]: I1203 19:48:04.511143 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-wszhq"]
Dec 03 19:48:04 crc kubenswrapper[4916]: I1203 19:48:04.525089 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-wfwfd" podStartSLOduration=2.068366582 podStartE2EDuration="5.525066176s" podCreationTimestamp="2025-12-03 19:47:59 +0000 UTC" firstStartedPulling="2025-12-03 19:48:00.599947395 +0000 UTC m=+1096.562757661" lastFinishedPulling="2025-12-03 19:48:04.056646979 +0000 UTC m=+1100.019457255" observedRunningTime="2025-12-03 19:48:04.498382756 +0000 UTC m=+1100.461193022" watchObservedRunningTime="2025-12-03 19:48:04.525066176 +0000 UTC m=+1100.487876432"
Dec 03 19:48:04 crc kubenswrapper[4916]: I1203 19:48:04.571692 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-3026-account-create-update-c96hn"]
Dec 03 19:48:04 crc kubenswrapper[4916]: W1203 19:48:04.573695 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50419ef1_71cf_4f8a_a74d_48a708e15785.slice/crio-c59a358c2fc3103aac6d55461a7e6b704ab56d4a09297fca7a1fd98da5f1ef3e WatchSource:0}: Error finding container c59a358c2fc3103aac6d55461a7e6b704ab56d4a09297fca7a1fd98da5f1ef3e: Status 404 returned error can't find the container with id c59a358c2fc3103aac6d55461a7e6b704ab56d4a09297fca7a1fd98da5f1ef3e
Dec 03 19:48:04 crc kubenswrapper[4916]: I1203 19:48:04.578830 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-q8p4t"]
Dec 03 19:48:04 crc kubenswrapper[4916]: I1203 19:48:04.701163 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-e728-account-create-update-wgg8d"]
Dec 03 19:48:04 crc kubenswrapper[4916]: W1203 19:48:04.710437 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6291ea12_342b_49d8_aa3c_671573f55c06.slice/crio-8eab6fd638d5cb8ac3ea51ab8824b71167fa8febb5b108cbd7c0e48dbfaafea2 WatchSource:0}: Error finding container 8eab6fd638d5cb8ac3ea51ab8824b71167fa8febb5b108cbd7c0e48dbfaafea2: Status 404 returned error can't find the container with id 8eab6fd638d5cb8ac3ea51ab8824b71167fa8febb5b108cbd7c0e48dbfaafea2
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.214700 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-cx5z4"
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.280491 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-fz592"]
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.280733 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-fz592" podUID="258215fe-726c-4a33-81f2-137aa706c6d1" containerName="dnsmasq-dns" containerID="cri-o://7935614971ec1d015d8ef50139fe8d47252fe8fade922a779724bf2b0b418875" gracePeriod=10
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.491656 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-3026-account-create-update-c96hn" event={"ID":"eaaed8df-f339-4fc0-a76e-be13e78ef8fd","Type":"ContainerStarted","Data":"22bbbf064561daffc929f3b601affb8e9984182f7199e9b2e28f3c189a440968"}
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.491706 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-3026-account-create-update-c96hn" event={"ID":"eaaed8df-f339-4fc0-a76e-be13e78ef8fd","Type":"ContainerStarted","Data":"5dbd2f7ab5bbc2e295a3b89b7b8ff17e4314ed7fa40028129824b2e638437af1"}
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.493449 4916 generic.go:334] "Generic (PLEG): container finished" podID="258215fe-726c-4a33-81f2-137aa706c6d1" containerID="7935614971ec1d015d8ef50139fe8d47252fe8fade922a779724bf2b0b418875" exitCode=0
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.493506 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-fz592" event={"ID":"258215fe-726c-4a33-81f2-137aa706c6d1","Type":"ContainerDied","Data":"7935614971ec1d015d8ef50139fe8d47252fe8fade922a779724bf2b0b418875"}
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.498067 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-e728-account-create-update-wgg8d" event={"ID":"6291ea12-342b-49d8-aa3c-671573f55c06","Type":"ContainerStarted","Data":"aba7d8ec5fb77b04be08f46e58d1616e2b916a918a31dd367327793d4f6952c7"}
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.498104 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-e728-account-create-update-wgg8d" event={"ID":"6291ea12-342b-49d8-aa3c-671573f55c06","Type":"ContainerStarted","Data":"8eab6fd638d5cb8ac3ea51ab8824b71167fa8febb5b108cbd7c0e48dbfaafea2"}
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.499979 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-q8p4t" event={"ID":"50419ef1-71cf-4f8a-a74d-48a708e15785","Type":"ContainerStarted","Data":"931ed90e3056a1e6708aa10dcac847f0dd8259eb6e4f7ebf9b8f35a3706805af"}
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.500026 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-q8p4t" event={"ID":"50419ef1-71cf-4f8a-a74d-48a708e15785","Type":"ContainerStarted","Data":"c59a358c2fc3103aac6d55461a7e6b704ab56d4a09297fca7a1fd98da5f1ef3e"}
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.502307 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wszhq" event={"ID":"c646c354-3c53-407c-ae77-4af980d70094","Type":"ContainerStarted","Data":"135964568bf6aabb64a18fc6920a2debd03b0df10a4e8139294c4e3d5168c572"}
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.502357 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wszhq" event={"ID":"c646c354-3c53-407c-ae77-4af980d70094","Type":"ContainerStarted","Data":"d40b523d85b74d9692d29f3056d36538d982820717dde5594deb852c90d9e797"}
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.549699 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-3026-account-create-update-c96hn" podStartSLOduration=3.549676598 podStartE2EDuration="3.549676598s" podCreationTimestamp="2025-12-03 19:48:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:05.525142574 +0000 UTC m=+1101.487952840" watchObservedRunningTime="2025-12-03 19:48:05.549676598 +0000 UTC m=+1101.512486864"
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.565818 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-wszhq" podStartSLOduration=3.565801177 podStartE2EDuration="3.565801177s" podCreationTimestamp="2025-12-03 19:48:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:05.545780064 +0000 UTC m=+1101.508590330" watchObservedRunningTime="2025-12-03 19:48:05.565801177 +0000 UTC m=+1101.528611443"
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.565913 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-q8p4t" podStartSLOduration=3.5659093200000003 podStartE2EDuration="3.56590932s" podCreationTimestamp="2025-12-03 19:48:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:05.562779917 +0000 UTC m=+1101.525590173" watchObservedRunningTime="2025-12-03 19:48:05.56590932 +0000 UTC m=+1101.528719586"
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.585950 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-e728-account-create-update-wgg8d" podStartSLOduration=3.585928473 podStartE2EDuration="3.585928473s" podCreationTimestamp="2025-12-03 19:48:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:05.579555284 +0000 UTC m=+1101.542365550" watchObservedRunningTime="2025-12-03 19:48:05.585928473 +0000 UTC m=+1101.548738739"
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.846650 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-fz592"
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.963453 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-config\") pod \"258215fe-726c-4a33-81f2-137aa706c6d1\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") "
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.963509 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-dns-svc\") pod \"258215fe-726c-4a33-81f2-137aa706c6d1\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") "
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.963537 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-sb\") pod \"258215fe-726c-4a33-81f2-137aa706c6d1\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") "
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.963590 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-nb\") pod \"258215fe-726c-4a33-81f2-137aa706c6d1\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") "
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.963703 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lj2l\" (UniqueName: \"kubernetes.io/projected/258215fe-726c-4a33-81f2-137aa706c6d1-kube-api-access-8lj2l\") pod \"258215fe-726c-4a33-81f2-137aa706c6d1\" (UID: \"258215fe-726c-4a33-81f2-137aa706c6d1\") "
Dec 03 19:48:05 crc kubenswrapper[4916]: I1203 19:48:05.968518 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/258215fe-726c-4a33-81f2-137aa706c6d1-kube-api-access-8lj2l" (OuterVolumeSpecName: "kube-api-access-8lj2l") pod "258215fe-726c-4a33-81f2-137aa706c6d1" (UID: "258215fe-726c-4a33-81f2-137aa706c6d1"). InnerVolumeSpecName "kube-api-access-8lj2l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.006198 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "258215fe-726c-4a33-81f2-137aa706c6d1" (UID: "258215fe-726c-4a33-81f2-137aa706c6d1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.022407 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "258215fe-726c-4a33-81f2-137aa706c6d1" (UID: "258215fe-726c-4a33-81f2-137aa706c6d1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.026911 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-config" (OuterVolumeSpecName: "config") pod "258215fe-726c-4a33-81f2-137aa706c6d1" (UID: "258215fe-726c-4a33-81f2-137aa706c6d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.044748 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "258215fe-726c-4a33-81f2-137aa706c6d1" (UID: "258215fe-726c-4a33-81f2-137aa706c6d1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.065789 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lj2l\" (UniqueName: \"kubernetes.io/projected/258215fe-726c-4a33-81f2-137aa706c6d1-kube-api-access-8lj2l\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.065829 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-config\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.065842 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.065874 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.065888 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/258215fe-726c-4a33-81f2-137aa706c6d1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.515170 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-fz592" event={"ID":"258215fe-726c-4a33-81f2-137aa706c6d1","Type":"ContainerDied","Data":"cda3c70801c022ac0e6090f0da717985cfc599d5d442d69a95187e619fec8a91"}
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.515220 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-fz592"
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.515233 4916 scope.go:117] "RemoveContainer" containerID="7935614971ec1d015d8ef50139fe8d47252fe8fade922a779724bf2b0b418875"
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.517505 4916 generic.go:334] "Generic (PLEG): container finished" podID="6291ea12-342b-49d8-aa3c-671573f55c06" containerID="aba7d8ec5fb77b04be08f46e58d1616e2b916a918a31dd367327793d4f6952c7" exitCode=0
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.517601 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-e728-account-create-update-wgg8d" event={"ID":"6291ea12-342b-49d8-aa3c-671573f55c06","Type":"ContainerDied","Data":"aba7d8ec5fb77b04be08f46e58d1616e2b916a918a31dd367327793d4f6952c7"}
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.519943 4916 generic.go:334] "Generic (PLEG): container finished" podID="50419ef1-71cf-4f8a-a74d-48a708e15785" containerID="931ed90e3056a1e6708aa10dcac847f0dd8259eb6e4f7ebf9b8f35a3706805af" exitCode=0
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.520025 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-q8p4t" event={"ID":"50419ef1-71cf-4f8a-a74d-48a708e15785","Type":"ContainerDied","Data":"931ed90e3056a1e6708aa10dcac847f0dd8259eb6e4f7ebf9b8f35a3706805af"}
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.522838 4916 generic.go:334] "Generic (PLEG): container finished" podID="c646c354-3c53-407c-ae77-4af980d70094" containerID="135964568bf6aabb64a18fc6920a2debd03b0df10a4e8139294c4e3d5168c572" exitCode=0
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.522896 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wszhq" event={"ID":"c646c354-3c53-407c-ae77-4af980d70094","Type":"ContainerDied","Data":"135964568bf6aabb64a18fc6920a2debd03b0df10a4e8139294c4e3d5168c572"}
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.533370 4916 generic.go:334] "Generic (PLEG): container finished" podID="eaaed8df-f339-4fc0-a76e-be13e78ef8fd" containerID="22bbbf064561daffc929f3b601affb8e9984182f7199e9b2e28f3c189a440968" exitCode=0
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.533433 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-3026-account-create-update-c96hn" event={"ID":"eaaed8df-f339-4fc0-a76e-be13e78ef8fd","Type":"ContainerDied","Data":"22bbbf064561daffc929f3b601affb8e9984182f7199e9b2e28f3c189a440968"}
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.554342 4916 scope.go:117] "RemoveContainer" containerID="e932f3fdb6bee5b8d7e07b7652fa967b23186406edf46cba451207d8900c18d5"
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.603227 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-fz592"]
Dec 03 19:48:06 crc kubenswrapper[4916]: I1203 19:48:06.614055 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-fz592"]
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.712882 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-hhxxm"]
Dec 03 19:48:07 crc kubenswrapper[4916]: E1203 19:48:07.713354 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="258215fe-726c-4a33-81f2-137aa706c6d1" containerName="init"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.713454 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="258215fe-726c-4a33-81f2-137aa706c6d1" containerName="init"
Dec 03 19:48:07 crc kubenswrapper[4916]: E1203 19:48:07.713491 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="258215fe-726c-4a33-81f2-137aa706c6d1" containerName="dnsmasq-dns"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.713500 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="258215fe-726c-4a33-81f2-137aa706c6d1" containerName="dnsmasq-dns"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.713744 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="258215fe-726c-4a33-81f2-137aa706c6d1" containerName="dnsmasq-dns"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.715594 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-hhxxm"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.725764 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-hhxxm"]
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.805281 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w94b8\" (UniqueName: \"kubernetes.io/projected/e7651127-5741-4e95-8dc4-179999e506d8-kube-api-access-w94b8\") pod \"glance-db-create-hhxxm\" (UID: \"e7651127-5741-4e95-8dc4-179999e506d8\") " pod="openstack/glance-db-create-hhxxm"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.805377 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7651127-5741-4e95-8dc4-179999e506d8-operator-scripts\") pod \"glance-db-create-hhxxm\" (UID: \"e7651127-5741-4e95-8dc4-179999e506d8\") " pod="openstack/glance-db-create-hhxxm"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.815553 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-ab84-account-create-update-7hc2j"]
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.816916 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ab84-account-create-update-7hc2j"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.820979 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.823925 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-ab84-account-create-update-7hc2j"]
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.906630 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e85d895f-c6a8-45f7-a18e-369f4cd00079-operator-scripts\") pod \"glance-ab84-account-create-update-7hc2j\" (UID: \"e85d895f-c6a8-45f7-a18e-369f4cd00079\") " pod="openstack/glance-ab84-account-create-update-7hc2j"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.906669 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r2mj\" (UniqueName: \"kubernetes.io/projected/e85d895f-c6a8-45f7-a18e-369f4cd00079-kube-api-access-2r2mj\") pod \"glance-ab84-account-create-update-7hc2j\" (UID: \"e85d895f-c6a8-45f7-a18e-369f4cd00079\") " pod="openstack/glance-ab84-account-create-update-7hc2j"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.906728 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w94b8\" (UniqueName: \"kubernetes.io/projected/e7651127-5741-4e95-8dc4-179999e506d8-kube-api-access-w94b8\") pod \"glance-db-create-hhxxm\" (UID: \"e7651127-5741-4e95-8dc4-179999e506d8\") " pod="openstack/glance-db-create-hhxxm"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.906873 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7651127-5741-4e95-8dc4-179999e506d8-operator-scripts\") pod \"glance-db-create-hhxxm\" (UID: \"e7651127-5741-4e95-8dc4-179999e506d8\") " pod="openstack/glance-db-create-hhxxm"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.907833 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7651127-5741-4e95-8dc4-179999e506d8-operator-scripts\") pod \"glance-db-create-hhxxm\" (UID: \"e7651127-5741-4e95-8dc4-179999e506d8\") " pod="openstack/glance-db-create-hhxxm"
Dec 03 19:48:07 crc kubenswrapper[4916]: I1203 19:48:07.937093 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w94b8\" (UniqueName: \"kubernetes.io/projected/e7651127-5741-4e95-8dc4-179999e506d8-kube-api-access-w94b8\") pod \"glance-db-create-hhxxm\" (UID: \"e7651127-5741-4e95-8dc4-179999e506d8\") " pod="openstack/glance-db-create-hhxxm"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.008837 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e85d895f-c6a8-45f7-a18e-369f4cd00079-operator-scripts\") pod \"glance-ab84-account-create-update-7hc2j\" (UID: \"e85d895f-c6a8-45f7-a18e-369f4cd00079\") " pod="openstack/glance-ab84-account-create-update-7hc2j"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.009206 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r2mj\" (UniqueName: \"kubernetes.io/projected/e85d895f-c6a8-45f7-a18e-369f4cd00079-kube-api-access-2r2mj\") pod \"glance-ab84-account-create-update-7hc2j\" (UID: \"e85d895f-c6a8-45f7-a18e-369f4cd00079\") " pod="openstack/glance-ab84-account-create-update-7hc2j"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.009705 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e85d895f-c6a8-45f7-a18e-369f4cd00079-operator-scripts\") pod \"glance-ab84-account-create-update-7hc2j\" (UID: \"e85d895f-c6a8-45f7-a18e-369f4cd00079\") " pod="openstack/glance-ab84-account-create-update-7hc2j"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.025331 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2r2mj\" (UniqueName: \"kubernetes.io/projected/e85d895f-c6a8-45f7-a18e-369f4cd00079-kube-api-access-2r2mj\") pod \"glance-ab84-account-create-update-7hc2j\" (UID: \"e85d895f-c6a8-45f7-a18e-369f4cd00079\") " pod="openstack/glance-ab84-account-create-update-7hc2j"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.056438 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-hhxxm"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.074916 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-e728-account-create-update-wgg8d"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.110438 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvh2r\" (UniqueName: \"kubernetes.io/projected/6291ea12-342b-49d8-aa3c-671573f55c06-kube-api-access-tvh2r\") pod \"6291ea12-342b-49d8-aa3c-671573f55c06\" (UID: \"6291ea12-342b-49d8-aa3c-671573f55c06\") "
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.110627 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6291ea12-342b-49d8-aa3c-671573f55c06-operator-scripts\") pod \"6291ea12-342b-49d8-aa3c-671573f55c06\" (UID: \"6291ea12-342b-49d8-aa3c-671573f55c06\") "
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.111446 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6291ea12-342b-49d8-aa3c-671573f55c06-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6291ea12-342b-49d8-aa3c-671573f55c06" (UID: "6291ea12-342b-49d8-aa3c-671573f55c06"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.116461 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6291ea12-342b-49d8-aa3c-671573f55c06-kube-api-access-tvh2r" (OuterVolumeSpecName: "kube-api-access-tvh2r") pod "6291ea12-342b-49d8-aa3c-671573f55c06" (UID: "6291ea12-342b-49d8-aa3c-671573f55c06"). InnerVolumeSpecName "kube-api-access-tvh2r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.131760 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-wszhq"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.134389 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ab84-account-create-update-7hc2j"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.146622 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-q8p4t"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.153347 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-3026-account-create-update-c96hn"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.212184 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50419ef1-71cf-4f8a-a74d-48a708e15785-operator-scripts\") pod \"50419ef1-71cf-4f8a-a74d-48a708e15785\" (UID: \"50419ef1-71cf-4f8a-a74d-48a708e15785\") "
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.212454 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8d59p\" (UniqueName: \"kubernetes.io/projected/50419ef1-71cf-4f8a-a74d-48a708e15785-kube-api-access-8d59p\") pod \"50419ef1-71cf-4f8a-a74d-48a708e15785\" (UID: \"50419ef1-71cf-4f8a-a74d-48a708e15785\") "
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.212528 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c646c354-3c53-407c-ae77-4af980d70094-operator-scripts\") pod \"c646c354-3c53-407c-ae77-4af980d70094\" (UID: \"c646c354-3c53-407c-ae77-4af980d70094\") "
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.212751 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sl87m\" (UniqueName: \"kubernetes.io/projected/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-kube-api-access-sl87m\") pod \"eaaed8df-f339-4fc0-a76e-be13e78ef8fd\" (UID: \"eaaed8df-f339-4fc0-a76e-be13e78ef8fd\") "
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.212778 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-operator-scripts\") pod \"eaaed8df-f339-4fc0-a76e-be13e78ef8fd\" (UID: \"eaaed8df-f339-4fc0-a76e-be13e78ef8fd\") "
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.212853 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jghh6\" (UniqueName: \"kubernetes.io/projected/c646c354-3c53-407c-ae77-4af980d70094-kube-api-access-jghh6\") pod \"c646c354-3c53-407c-ae77-4af980d70094\" (UID: \"c646c354-3c53-407c-ae77-4af980d70094\") "
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.213289 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6291ea12-342b-49d8-aa3c-671573f55c06-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.213306 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvh2r\" (UniqueName: \"kubernetes.io/projected/6291ea12-342b-49d8-aa3c-671573f55c06-kube-api-access-tvh2r\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.214391 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c646c354-3c53-407c-ae77-4af980d70094-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c646c354-3c53-407c-ae77-4af980d70094" (UID: "c646c354-3c53-407c-ae77-4af980d70094"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.214660 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50419ef1-71cf-4f8a-a74d-48a708e15785-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "50419ef1-71cf-4f8a-a74d-48a708e15785" (UID: "50419ef1-71cf-4f8a-a74d-48a708e15785"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.214691 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eaaed8df-f339-4fc0-a76e-be13e78ef8fd" (UID: "eaaed8df-f339-4fc0-a76e-be13e78ef8fd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.218276 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50419ef1-71cf-4f8a-a74d-48a708e15785-kube-api-access-8d59p" (OuterVolumeSpecName: "kube-api-access-8d59p") pod "50419ef1-71cf-4f8a-a74d-48a708e15785" (UID: "50419ef1-71cf-4f8a-a74d-48a708e15785"). InnerVolumeSpecName "kube-api-access-8d59p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.218475 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c646c354-3c53-407c-ae77-4af980d70094-kube-api-access-jghh6" (OuterVolumeSpecName: "kube-api-access-jghh6") pod "c646c354-3c53-407c-ae77-4af980d70094" (UID: "c646c354-3c53-407c-ae77-4af980d70094"). InnerVolumeSpecName "kube-api-access-jghh6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.218737 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-kube-api-access-sl87m" (OuterVolumeSpecName: "kube-api-access-sl87m") pod "eaaed8df-f339-4fc0-a76e-be13e78ef8fd" (UID: "eaaed8df-f339-4fc0-a76e-be13e78ef8fd"). InnerVolumeSpecName "kube-api-access-sl87m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.313942 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sl87m\" (UniqueName: \"kubernetes.io/projected/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-kube-api-access-sl87m\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.313970 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaaed8df-f339-4fc0-a76e-be13e78ef8fd-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.313979 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jghh6\" (UniqueName: \"kubernetes.io/projected/c646c354-3c53-407c-ae77-4af980d70094-kube-api-access-jghh6\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.313990 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50419ef1-71cf-4f8a-a74d-48a708e15785-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.313998 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8d59p\" (UniqueName: \"kubernetes.io/projected/50419ef1-71cf-4f8a-a74d-48a708e15785-kube-api-access-8d59p\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.314008 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c646c354-3c53-407c-ae77-4af980d70094-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.486680 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="258215fe-726c-4a33-81f2-137aa706c6d1" path="/var/lib/kubelet/pods/258215fe-726c-4a33-81f2-137aa706c6d1/volumes"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.548241 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-hhxxm"]
Dec 03 19:48:08 crc kubenswrapper[4916]: W1203 19:48:08.551041 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7651127_5741_4e95_8dc4_179999e506d8.slice/crio-c6ee84c38d10857c49e4f9b16dfea5fe6a779438bb06cf45ebf0f361f57a954f WatchSource:0}: Error finding container c6ee84c38d10857c49e4f9b16dfea5fe6a779438bb06cf45ebf0f361f57a954f: Status 404 returned error can't find the container with id c6ee84c38d10857c49e4f9b16dfea5fe6a779438bb06cf45ebf0f361f57a954f
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.557822 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-e728-account-create-update-wgg8d" event={"ID":"6291ea12-342b-49d8-aa3c-671573f55c06","Type":"ContainerDied","Data":"8eab6fd638d5cb8ac3ea51ab8824b71167fa8febb5b108cbd7c0e48dbfaafea2"}
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.557941 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8eab6fd638d5cb8ac3ea51ab8824b71167fa8febb5b108cbd7c0e48dbfaafea2"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.558070 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-e728-account-create-update-wgg8d"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.570960 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-q8p4t" event={"ID":"50419ef1-71cf-4f8a-a74d-48a708e15785","Type":"ContainerDied","Data":"c59a358c2fc3103aac6d55461a7e6b704ab56d4a09297fca7a1fd98da5f1ef3e"}
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.571011 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c59a358c2fc3103aac6d55461a7e6b704ab56d4a09297fca7a1fd98da5f1ef3e"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.571080 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-q8p4t"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.575684 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-wszhq" event={"ID":"c646c354-3c53-407c-ae77-4af980d70094","Type":"ContainerDied","Data":"d40b523d85b74d9692d29f3056d36538d982820717dde5594deb852c90d9e797"}
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.575722 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d40b523d85b74d9692d29f3056d36538d982820717dde5594deb852c90d9e797"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.575776 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-wszhq"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.578702 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-3026-account-create-update-c96hn" event={"ID":"eaaed8df-f339-4fc0-a76e-be13e78ef8fd","Type":"ContainerDied","Data":"5dbd2f7ab5bbc2e295a3b89b7b8ff17e4314ed7fa40028129824b2e638437af1"}
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.578749 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5dbd2f7ab5bbc2e295a3b89b7b8ff17e4314ed7fa40028129824b2e638437af1"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.578831 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-3026-account-create-update-c96hn"
Dec 03 19:48:08 crc kubenswrapper[4916]: I1203 19:48:08.623527 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-ab84-account-create-update-7hc2j"]
Dec 03 19:48:09 crc kubenswrapper[4916]: I1203 19:48:09.202111 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0"
Dec 03 19:48:09 crc kubenswrapper[4916]: I1203 19:48:09.592546 4916 generic.go:334] "Generic (PLEG): container finished" podID="e85d895f-c6a8-45f7-a18e-369f4cd00079" containerID="38d4e6a1a5d77e6b6322fc12c44975a22dcf3485adacd38ae9acbd4be3e193dc" exitCode=0
Dec 03 19:48:09 crc kubenswrapper[4916]: I1203 19:48:09.592619 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ab84-account-create-update-7hc2j" event={"ID":"e85d895f-c6a8-45f7-a18e-369f4cd00079","Type":"ContainerDied","Data":"38d4e6a1a5d77e6b6322fc12c44975a22dcf3485adacd38ae9acbd4be3e193dc"}
Dec 03 19:48:09 crc kubenswrapper[4916]: I1203 19:48:09.592677 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ab84-account-create-update-7hc2j" event={"ID":"e85d895f-c6a8-45f7-a18e-369f4cd00079","Type":"ContainerStarted","Data":"a3301361d6760a88d7b878da37f0eddfc7333e91ec2aa44a62d4844ca9ffde4e"}
Dec 03 19:48:09 crc kubenswrapper[4916]: I1203 19:48:09.597474 4916 generic.go:334] "Generic (PLEG): container finished" podID="e7651127-5741-4e95-8dc4-179999e506d8" containerID="5a77c14219fbf503d79ba40e11ef71d09df2161a77e9e5e6b61bf675bc73f9a6" exitCode=0
Dec 03 19:48:09 crc kubenswrapper[4916]: I1203 19:48:09.597529 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-hhxxm" event={"ID":"e7651127-5741-4e95-8dc4-179999e506d8","Type":"ContainerDied","Data":"5a77c14219fbf503d79ba40e11ef71d09df2161a77e9e5e6b61bf675bc73f9a6"}
Dec 03 19:48:09 crc kubenswrapper[4916]: I1203 19:48:09.597585 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-hhxxm" event={"ID":"e7651127-5741-4e95-8dc4-179999e506d8","Type":"ContainerStarted","Data":"c6ee84c38d10857c49e4f9b16dfea5fe6a779438bb06cf45ebf0f361f57a954f"}
Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.016027 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ab84-account-create-update-7hc2j"
Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.024742 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-hhxxm"
Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.071600 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2r2mj\" (UniqueName: \"kubernetes.io/projected/e85d895f-c6a8-45f7-a18e-369f4cd00079-kube-api-access-2r2mj\") pod \"e85d895f-c6a8-45f7-a18e-369f4cd00079\" (UID: \"e85d895f-c6a8-45f7-a18e-369f4cd00079\") "
Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.071709 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7651127-5741-4e95-8dc4-179999e506d8-operator-scripts\") pod \"e7651127-5741-4e95-8dc4-179999e506d8\" (UID: \"e7651127-5741-4e95-8dc4-179999e506d8\") "
Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.072608 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7651127-5741-4e95-8dc4-179999e506d8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e7651127-5741-4e95-8dc4-179999e506d8" (UID: "e7651127-5741-4e95-8dc4-179999e506d8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.072715 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w94b8\" (UniqueName: \"kubernetes.io/projected/e7651127-5741-4e95-8dc4-179999e506d8-kube-api-access-w94b8\") pod \"e7651127-5741-4e95-8dc4-179999e506d8\" (UID: \"e7651127-5741-4e95-8dc4-179999e506d8\") "
Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.072794 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e85d895f-c6a8-45f7-a18e-369f4cd00079-operator-scripts\") pod \"e85d895f-c6a8-45f7-a18e-369f4cd00079\" (UID: \"e85d895f-c6a8-45f7-a18e-369f4cd00079\") "
Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.073423 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e7651127-5741-4e95-8dc4-179999e506d8-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.073940 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e85d895f-c6a8-45f7-a18e-369f4cd00079-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e85d895f-c6a8-45f7-a18e-369f4cd00079" (UID: "e85d895f-c6a8-45f7-a18e-369f4cd00079"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.079789 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7651127-5741-4e95-8dc4-179999e506d8-kube-api-access-w94b8" (OuterVolumeSpecName: "kube-api-access-w94b8") pod "e7651127-5741-4e95-8dc4-179999e506d8" (UID: "e7651127-5741-4e95-8dc4-179999e506d8"). InnerVolumeSpecName "kube-api-access-w94b8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.079801 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e85d895f-c6a8-45f7-a18e-369f4cd00079-kube-api-access-2r2mj" (OuterVolumeSpecName: "kube-api-access-2r2mj") pod "e85d895f-c6a8-45f7-a18e-369f4cd00079" (UID: "e85d895f-c6a8-45f7-a18e-369f4cd00079"). InnerVolumeSpecName "kube-api-access-2r2mj".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.175596 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w94b8\" (UniqueName: \"kubernetes.io/projected/e7651127-5741-4e95-8dc4-179999e506d8-kube-api-access-w94b8\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.175632 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e85d895f-c6a8-45f7-a18e-369f4cd00079-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.175646 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2r2mj\" (UniqueName: \"kubernetes.io/projected/e85d895f-c6a8-45f7-a18e-369f4cd00079-kube-api-access-2r2mj\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.617927 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-ab84-account-create-update-7hc2j" event={"ID":"e85d895f-c6a8-45f7-a18e-369f4cd00079","Type":"ContainerDied","Data":"a3301361d6760a88d7b878da37f0eddfc7333e91ec2aa44a62d4844ca9ffde4e"} Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.617996 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-ab84-account-create-update-7hc2j" Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.618005 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3301361d6760a88d7b878da37f0eddfc7333e91ec2aa44a62d4844ca9ffde4e" Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.619506 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-hhxxm" event={"ID":"e7651127-5741-4e95-8dc4-179999e506d8","Type":"ContainerDied","Data":"c6ee84c38d10857c49e4f9b16dfea5fe6a779438bb06cf45ebf0f361f57a954f"} Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.619559 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6ee84c38d10857c49e4f9b16dfea5fe6a779438bb06cf45ebf0f361f57a954f" Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.619627 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-hhxxm" Dec 03 19:48:11 crc kubenswrapper[4916]: I1203 19:48:11.890456 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:48:11 crc kubenswrapper[4916]: E1203 19:48:11.890814 4916 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 19:48:11 crc kubenswrapper[4916]: E1203 19:48:11.891223 4916 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 19:48:11 crc kubenswrapper[4916]: E1203 19:48:11.891352 4916 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift podName:bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5 nodeName:}" failed. No retries permitted until 2025-12-03 19:48:27.891309057 +0000 UTC m=+1123.854119373 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift") pod "swift-storage-0" (UID: "bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5") : configmap "swift-ring-files" not found Dec 03 19:48:12 crc kubenswrapper[4916]: I1203 19:48:12.633060 4916 generic.go:334] "Generic (PLEG): container finished" podID="955b2a04-73e1-4ab5-b322-e301684e8785" containerID="af5da4767ff26770d81f3617197ba09627db299ba3d2cebcf6373ebe333ff9ae" exitCode=0 Dec 03 19:48:12 crc kubenswrapper[4916]: I1203 19:48:12.633137 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-wfwfd" event={"ID":"955b2a04-73e1-4ab5-b322-e301684e8785","Type":"ContainerDied","Data":"af5da4767ff26770d81f3617197ba09627db299ba3d2cebcf6373ebe333ff9ae"} Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000297 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-jn7bl"] Dec 03 19:48:13 crc kubenswrapper[4916]: E1203 19:48:13.000626 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7651127-5741-4e95-8dc4-179999e506d8" containerName="mariadb-database-create" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000638 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7651127-5741-4e95-8dc4-179999e506d8" containerName="mariadb-database-create" Dec 03 19:48:13 crc kubenswrapper[4916]: E1203 19:48:13.000650 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6291ea12-342b-49d8-aa3c-671573f55c06" containerName="mariadb-account-create-update" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000656 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6291ea12-342b-49d8-aa3c-671573f55c06" containerName="mariadb-account-create-update" Dec 03 19:48:13 crc kubenswrapper[4916]: E1203 19:48:13.000665 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c646c354-3c53-407c-ae77-4af980d70094" containerName="mariadb-database-create" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000672 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="c646c354-3c53-407c-ae77-4af980d70094" containerName="mariadb-database-create" Dec 03 19:48:13 crc kubenswrapper[4916]: E1203 19:48:13.000691 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e85d895f-c6a8-45f7-a18e-369f4cd00079" containerName="mariadb-account-create-update" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000697 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e85d895f-c6a8-45f7-a18e-369f4cd00079" containerName="mariadb-account-create-update" Dec 03 19:48:13 crc kubenswrapper[4916]: E1203 19:48:13.000709 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaaed8df-f339-4fc0-a76e-be13e78ef8fd" containerName="mariadb-account-create-update" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000714 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaaed8df-f339-4fc0-a76e-be13e78ef8fd" containerName="mariadb-account-create-update" Dec 03 19:48:13 crc kubenswrapper[4916]: E1203 19:48:13.000729 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50419ef1-71cf-4f8a-a74d-48a708e15785" containerName="mariadb-database-create" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000735 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="50419ef1-71cf-4f8a-a74d-48a708e15785" containerName="mariadb-database-create" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000909 4916 
memory_manager.go:354] "RemoveStaleState removing state" podUID="eaaed8df-f339-4fc0-a76e-be13e78ef8fd" containerName="mariadb-account-create-update" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000927 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="e85d895f-c6a8-45f7-a18e-369f4cd00079" containerName="mariadb-account-create-update" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000936 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="50419ef1-71cf-4f8a-a74d-48a708e15785" containerName="mariadb-database-create" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000944 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6291ea12-342b-49d8-aa3c-671573f55c06" containerName="mariadb-account-create-update" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000955 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="c646c354-3c53-407c-ae77-4af980d70094" containerName="mariadb-database-create" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.000964 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7651127-5741-4e95-8dc4-179999e506d8" containerName="mariadb-database-create" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.001445 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.006042 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.011427 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-5xd5f" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.017886 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-jn7bl"] Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.040414 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-stq8b" podUID="7cb5f017-c41b-4af3-8455-e1ab42faa626" containerName="ovn-controller" probeResult="failure" output=< Dec 03 19:48:13 crc kubenswrapper[4916]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 03 19:48:13 crc kubenswrapper[4916]: > Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.113510 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-db-sync-config-data\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.113599 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-combined-ca-bundle\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.113711 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-config-data\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 
19:48:13.113798 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nrmk\" (UniqueName: \"kubernetes.io/projected/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-kube-api-access-5nrmk\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.214682 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-config-data\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.214943 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nrmk\" (UniqueName: \"kubernetes.io/projected/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-kube-api-access-5nrmk\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.215065 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-db-sync-config-data\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.215219 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-combined-ca-bundle\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.222027 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-combined-ca-bundle\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.222211 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-config-data\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.222438 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-db-sync-config-data\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.246968 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nrmk\" (UniqueName: \"kubernetes.io/projected/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-kube-api-access-5nrmk\") pod \"glance-db-sync-jn7bl\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.329821 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.967939 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-jn7bl"] Dec 03 19:48:13 crc kubenswrapper[4916]: W1203 19:48:13.974813 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c5b4292_d369_4cdf_b1c7_4da7ddf9643d.slice/crio-10dcbe3db4d910777666e16c589ec2a90aac2461ffb0a4ce8daafdf6434cef5d WatchSource:0}: Error finding container 10dcbe3db4d910777666e16c589ec2a90aac2461ffb0a4ce8daafdf6434cef5d: Status 404 returned error can't find the container with id 10dcbe3db4d910777666e16c589ec2a90aac2461ffb0a4ce8daafdf6434cef5d Dec 03 19:48:13 crc kubenswrapper[4916]: I1203 19:48:13.992157 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.031447 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/955b2a04-73e1-4ab5-b322-e301684e8785-etc-swift\") pod \"955b2a04-73e1-4ab5-b322-e301684e8785\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.031775 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-scripts\") pod \"955b2a04-73e1-4ab5-b322-e301684e8785\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.031858 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-swiftconf\") pod \"955b2a04-73e1-4ab5-b322-e301684e8785\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.031943 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-combined-ca-bundle\") pod \"955b2a04-73e1-4ab5-b322-e301684e8785\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.031985 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-ring-data-devices\") pod \"955b2a04-73e1-4ab5-b322-e301684e8785\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.032016 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcd57\" (UniqueName: \"kubernetes.io/projected/955b2a04-73e1-4ab5-b322-e301684e8785-kube-api-access-lcd57\") pod \"955b2a04-73e1-4ab5-b322-e301684e8785\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.032073 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-dispersionconf\") pod \"955b2a04-73e1-4ab5-b322-e301684e8785\" (UID: \"955b2a04-73e1-4ab5-b322-e301684e8785\") " Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.033792 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "955b2a04-73e1-4ab5-b322-e301684e8785" (UID: "955b2a04-73e1-4ab5-b322-e301684e8785"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.034365 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/955b2a04-73e1-4ab5-b322-e301684e8785-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "955b2a04-73e1-4ab5-b322-e301684e8785" (UID: "955b2a04-73e1-4ab5-b322-e301684e8785"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.038278 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/955b2a04-73e1-4ab5-b322-e301684e8785-kube-api-access-lcd57" (OuterVolumeSpecName: "kube-api-access-lcd57") pod "955b2a04-73e1-4ab5-b322-e301684e8785" (UID: "955b2a04-73e1-4ab5-b322-e301684e8785"). InnerVolumeSpecName "kube-api-access-lcd57". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.040657 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "955b2a04-73e1-4ab5-b322-e301684e8785" (UID: "955b2a04-73e1-4ab5-b322-e301684e8785"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.052419 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-scripts" (OuterVolumeSpecName: "scripts") pod "955b2a04-73e1-4ab5-b322-e301684e8785" (UID: "955b2a04-73e1-4ab5-b322-e301684e8785"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.058108 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "955b2a04-73e1-4ab5-b322-e301684e8785" (UID: "955b2a04-73e1-4ab5-b322-e301684e8785"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.066780 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "955b2a04-73e1-4ab5-b322-e301684e8785" (UID: "955b2a04-73e1-4ab5-b322-e301684e8785"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.134178 4916 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.134215 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.134231 4916 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.134245 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcd57\" (UniqueName: \"kubernetes.io/projected/955b2a04-73e1-4ab5-b322-e301684e8785-kube-api-access-lcd57\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.134259 4916 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/955b2a04-73e1-4ab5-b322-e301684e8785-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.134270 4916 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/955b2a04-73e1-4ab5-b322-e301684e8785-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.134281 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/955b2a04-73e1-4ab5-b322-e301684e8785-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.660746 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-wfwfd" event={"ID":"955b2a04-73e1-4ab5-b322-e301684e8785","Type":"ContainerDied","Data":"158f7d53c92c500a6292a07dace7b7b57444e86725dfc918c2823192e34ecab8"} Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.660784 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="158f7d53c92c500a6292a07dace7b7b57444e86725dfc918c2823192e34ecab8" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.660782 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-wfwfd" Dec 03 19:48:14 crc kubenswrapper[4916]: I1203 19:48:14.662636 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jn7bl" event={"ID":"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d","Type":"ContainerStarted","Data":"10dcbe3db4d910777666e16c589ec2a90aac2461ffb0a4ce8daafdf6434cef5d"} Dec 03 19:48:15 crc kubenswrapper[4916]: I1203 19:48:15.672517 4916 generic.go:334] "Generic (PLEG): container finished" podID="13520585-08f1-45f7-b40d-d53b9f047cfd" containerID="4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a" exitCode=0 Dec 03 19:48:15 crc kubenswrapper[4916]: I1203 19:48:15.672608 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"13520585-08f1-45f7-b40d-d53b9f047cfd","Type":"ContainerDied","Data":"4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a"} Dec 03 19:48:16 crc kubenswrapper[4916]: I1203 19:48:16.159624 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:48:16 crc kubenswrapper[4916]: I1203 19:48:16.159919 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:48:16 crc kubenswrapper[4916]: I1203 19:48:16.159971 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:48:16 crc kubenswrapper[4916]: I1203 19:48:16.161815 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ac2ebe3bbf276071a9bfb2a9d6c5b901691899bf5c59f5b451ee6d04eb0e197f"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 19:48:16 crc kubenswrapper[4916]: I1203 19:48:16.161877 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://ac2ebe3bbf276071a9bfb2a9d6c5b901691899bf5c59f5b451ee6d04eb0e197f" gracePeriod=600 Dec 03 19:48:16 crc kubenswrapper[4916]: I1203 19:48:16.688692 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"13520585-08f1-45f7-b40d-d53b9f047cfd","Type":"ContainerStarted","Data":"37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a"} Dec 03 19:48:16 crc kubenswrapper[4916]: I1203 19:48:16.689855 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:48:16 crc kubenswrapper[4916]: I1203 19:48:16.694208 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"ac2ebe3bbf276071a9bfb2a9d6c5b901691899bf5c59f5b451ee6d04eb0e197f"} Dec 03 19:48:16 
crc kubenswrapper[4916]: I1203 19:48:16.694192 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="ac2ebe3bbf276071a9bfb2a9d6c5b901691899bf5c59f5b451ee6d04eb0e197f" exitCode=0 Dec 03 19:48:16 crc kubenswrapper[4916]: I1203 19:48:16.694298 4916 scope.go:117] "RemoveContainer" containerID="dbc6d2dff458c9d2c91a2f82a009f88b78c61b85becef77733114e92974e9b6f" Dec 03 19:48:16 crc kubenswrapper[4916]: I1203 19:48:16.719711 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=51.050768219 podStartE2EDuration="58.719636259s" podCreationTimestamp="2025-12-03 19:47:18 +0000 UTC" firstStartedPulling="2025-12-03 19:47:33.147398309 +0000 UTC m=+1069.110208575" lastFinishedPulling="2025-12-03 19:47:40.816266349 +0000 UTC m=+1076.779076615" observedRunningTime="2025-12-03 19:48:16.714587634 +0000 UTC m=+1112.677397900" watchObservedRunningTime="2025-12-03 19:48:16.719636259 +0000 UTC m=+1112.682446535" Dec 03 19:48:17 crc kubenswrapper[4916]: I1203 19:48:17.708401 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"f766f08ec381c0d446f946242779f93ec8affbc91dd83bc4db900247c021dcf7"} Dec 03 19:48:18 crc kubenswrapper[4916]: I1203 19:48:18.016305 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-stq8b" podUID="7cb5f017-c41b-4af3-8455-e1ab42faa626" containerName="ovn-controller" probeResult="failure" output=< Dec 03 19:48:18 crc kubenswrapper[4916]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 03 19:48:18 crc kubenswrapper[4916]: > Dec 03 19:48:18 crc kubenswrapper[4916]: I1203 19:48:18.714868 4916 generic.go:334] "Generic (PLEG): container finished" podID="6ffb0836-d978-4f53-9a48-1174b647eeaf" containerID="e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7" exitCode=0 Dec 03 19:48:18 crc kubenswrapper[4916]: I1203 19:48:18.714936 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6ffb0836-d978-4f53-9a48-1174b647eeaf","Type":"ContainerDied","Data":"e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7"} Dec 03 19:48:22 crc kubenswrapper[4916]: I1203 19:48:22.984738 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-stq8b" podUID="7cb5f017-c41b-4af3-8455-e1ab42faa626" containerName="ovn-controller" probeResult="failure" output=< Dec 03 19:48:22 crc kubenswrapper[4916]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 03 19:48:22 crc kubenswrapper[4916]: > Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.041030 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.041714 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-7wkt5" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.309161 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-stq8b-config-qgzv6"] Dec 03 19:48:23 crc kubenswrapper[4916]: E1203 19:48:23.312144 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="955b2a04-73e1-4ab5-b322-e301684e8785" containerName="swift-ring-rebalance" Dec 03 
19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.312173 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="955b2a04-73e1-4ab5-b322-e301684e8785" containerName="swift-ring-rebalance" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.325058 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="955b2a04-73e1-4ab5-b322-e301684e8785" containerName="swift-ring-rebalance" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.329937 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.333462 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.343077 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-stq8b-config-qgzv6"] Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.430810 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-additional-scripts\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.431488 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run-ovn\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.431520 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-log-ovn\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.431547 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2zhq\" (UniqueName: \"kubernetes.io/projected/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-kube-api-access-s2zhq\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.431598 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.431619 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-scripts\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.532966 4916 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-additional-scripts\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.533011 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run-ovn\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.533036 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-log-ovn\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.533376 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-log-ovn\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.533397 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run-ovn\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.533062 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2zhq\" (UniqueName: \"kubernetes.io/projected/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-kube-api-access-s2zhq\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.533713 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-additional-scripts\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.533857 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.533963 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-scripts\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.533924 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-run\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.536336 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-scripts\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.551779 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2zhq\" (UniqueName: \"kubernetes.io/projected/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-kube-api-access-s2zhq\") pod \"ovn-controller-stq8b-config-qgzv6\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:23 crc kubenswrapper[4916]: I1203 19:48:23.655703 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:25 crc kubenswrapper[4916]: I1203 19:48:25.912130 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-stq8b-config-qgzv6"] Dec 03 19:48:26 crc kubenswrapper[4916]: I1203 19:48:26.787223 4916 generic.go:334] "Generic (PLEG): container finished" podID="fbdb0e7a-46e3-43f2-aa50-0391d6b97554" containerID="59c8b75f8b7f7e8931893628d57d1aacb5dc1567ff29fba6d520b87c2e53a0de" exitCode=0 Dec 03 19:48:26 crc kubenswrapper[4916]: I1203 19:48:26.787365 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-stq8b-config-qgzv6" event={"ID":"fbdb0e7a-46e3-43f2-aa50-0391d6b97554","Type":"ContainerDied","Data":"59c8b75f8b7f7e8931893628d57d1aacb5dc1567ff29fba6d520b87c2e53a0de"} Dec 03 19:48:26 crc kubenswrapper[4916]: I1203 19:48:26.787655 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-stq8b-config-qgzv6" event={"ID":"fbdb0e7a-46e3-43f2-aa50-0391d6b97554","Type":"ContainerStarted","Data":"9b08d8cde914309a9ed7803657a63b0d24ddc4501be65af4635a7d04d1cba1bb"} Dec 03 19:48:26 crc kubenswrapper[4916]: I1203 19:48:26.789321 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6ffb0836-d978-4f53-9a48-1174b647eeaf","Type":"ContainerStarted","Data":"cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6"} Dec 03 19:48:26 crc kubenswrapper[4916]: I1203 19:48:26.789508 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 19:48:26 crc kubenswrapper[4916]: I1203 19:48:26.791142 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jn7bl" event={"ID":"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d","Type":"ContainerStarted","Data":"4ace83561891847911640b21726dc52ec0f4e20225f05450c0c3bc9fe796129a"} Dec 03 19:48:26 crc kubenswrapper[4916]: I1203 19:48:26.843344 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=61.057145651 podStartE2EDuration="1m9.843313582s" podCreationTimestamp="2025-12-03 19:47:17 +0000 UTC" firstStartedPulling="2025-12-03 19:47:32.979207019 +0000 UTC m=+1068.942017285" lastFinishedPulling="2025-12-03 19:47:41.76537495 +0000 UTC m=+1077.728185216" observedRunningTime="2025-12-03 19:48:26.833743777 
+0000 UTC m=+1122.796554043" watchObservedRunningTime="2025-12-03 19:48:26.843313582 +0000 UTC m=+1122.806123848" Dec 03 19:48:26 crc kubenswrapper[4916]: I1203 19:48:26.867136 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-jn7bl" podStartSLOduration=3.152708055 podStartE2EDuration="14.867117396s" podCreationTimestamp="2025-12-03 19:48:12 +0000 UTC" firstStartedPulling="2025-12-03 19:48:13.976864405 +0000 UTC m=+1109.939674671" lastFinishedPulling="2025-12-03 19:48:25.691273726 +0000 UTC m=+1121.654084012" observedRunningTime="2025-12-03 19:48:26.858304121 +0000 UTC m=+1122.821114387" watchObservedRunningTime="2025-12-03 19:48:26.867117396 +0000 UTC m=+1122.829927662" Dec 03 19:48:27 crc kubenswrapper[4916]: I1203 19:48:27.908709 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:48:27 crc kubenswrapper[4916]: I1203 19:48:27.915978 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5-etc-swift\") pod \"swift-storage-0\" (UID: \"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5\") " pod="openstack/swift-storage-0" Dec 03 19:48:27 crc kubenswrapper[4916]: I1203 19:48:27.991101 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.002325 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-stq8b" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.132219 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.213542 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2zhq\" (UniqueName: \"kubernetes.io/projected/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-kube-api-access-s2zhq\") pod \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.213677 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-scripts\") pod \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.213712 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run\") pod \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.213760 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run-ovn\") pod \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.213857 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-additional-scripts\") pod \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.213903 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-log-ovn\") pod \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\" (UID: \"fbdb0e7a-46e3-43f2-aa50-0391d6b97554\") " Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.214297 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "fbdb0e7a-46e3-43f2-aa50-0391d6b97554" (UID: "fbdb0e7a-46e3-43f2-aa50-0391d6b97554"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.214336 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run" (OuterVolumeSpecName: "var-run") pod "fbdb0e7a-46e3-43f2-aa50-0391d6b97554" (UID: "fbdb0e7a-46e3-43f2-aa50-0391d6b97554"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.214356 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "fbdb0e7a-46e3-43f2-aa50-0391d6b97554" (UID: "fbdb0e7a-46e3-43f2-aa50-0391d6b97554"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.214734 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "fbdb0e7a-46e3-43f2-aa50-0391d6b97554" (UID: "fbdb0e7a-46e3-43f2-aa50-0391d6b97554"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.214861 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-scripts" (OuterVolumeSpecName: "scripts") pod "fbdb0e7a-46e3-43f2-aa50-0391d6b97554" (UID: "fbdb0e7a-46e3-43f2-aa50-0391d6b97554"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.226425 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-kube-api-access-s2zhq" (OuterVolumeSpecName: "kube-api-access-s2zhq") pod "fbdb0e7a-46e3-43f2-aa50-0391d6b97554" (UID: "fbdb0e7a-46e3-43f2-aa50-0391d6b97554"). InnerVolumeSpecName "kube-api-access-s2zhq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.315942 4916 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.315996 4916 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.316011 4916 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.316023 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2zhq\" (UniqueName: \"kubernetes.io/projected/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-kube-api-access-s2zhq\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.316037 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.316050 4916 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fbdb0e7a-46e3-43f2-aa50-0391d6b97554-var-run\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:28 crc kubenswrapper[4916]: W1203 19:48:28.393132 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb20ada8_afbe_42ef_a8b8_a22cd16d4dc5.slice/crio-06a65329cbc9f070ec7dd2b9bc63d680d28f6e73f858b7c83bd2746abf7a5397 WatchSource:0}: Error finding container 06a65329cbc9f070ec7dd2b9bc63d680d28f6e73f858b7c83bd2746abf7a5397: Status 404 returned error can't find the container with id 06a65329cbc9f070ec7dd2b9bc63d680d28f6e73f858b7c83bd2746abf7a5397 Dec 03 19:48:28 crc 
kubenswrapper[4916]: I1203 19:48:28.394072 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.810291 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-stq8b-config-qgzv6" event={"ID":"fbdb0e7a-46e3-43f2-aa50-0391d6b97554","Type":"ContainerDied","Data":"9b08d8cde914309a9ed7803657a63b0d24ddc4501be65af4635a7d04d1cba1bb"} Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.810826 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b08d8cde914309a9ed7803657a63b0d24ddc4501be65af4635a7d04d1cba1bb" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.810744 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-stq8b-config-qgzv6" Dec 03 19:48:28 crc kubenswrapper[4916]: I1203 19:48:28.812538 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"06a65329cbc9f070ec7dd2b9bc63d680d28f6e73f858b7c83bd2746abf7a5397"} Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.233326 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-stq8b-config-qgzv6"] Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.242846 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-stq8b-config-qgzv6"] Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.363969 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-stq8b-config-58lgg"] Dec 03 19:48:29 crc kubenswrapper[4916]: E1203 19:48:29.364335 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbdb0e7a-46e3-43f2-aa50-0391d6b97554" containerName="ovn-config" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.364356 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbdb0e7a-46e3-43f2-aa50-0391d6b97554" containerName="ovn-config" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.364550 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbdb0e7a-46e3-43f2-aa50-0391d6b97554" containerName="ovn-config" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.365186 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.370166 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.376644 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-stq8b-config-58lgg"] Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.431763 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-additional-scripts\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.432303 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-log-ovn\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.432331 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-scripts\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.432388 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.432492 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run-ovn\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.432533 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4ngg\" (UniqueName: \"kubernetes.io/projected/364b12f1-18a0-4586-b1f6-e38c99636c5e-kube-api-access-r4ngg\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.526442 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.533813 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.533913 4916 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run-ovn\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.533944 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4ngg\" (UniqueName: \"kubernetes.io/projected/364b12f1-18a0-4586-b1f6-e38c99636c5e-kube-api-access-r4ngg\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.534022 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-additional-scripts\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.534061 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-scripts\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.534081 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-log-ovn\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.534253 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.534271 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run-ovn\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.534548 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-log-ovn\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.535066 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-additional-scripts\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.536435 4916 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-scripts\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.565557 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4ngg\" (UniqueName: \"kubernetes.io/projected/364b12f1-18a0-4586-b1f6-e38c99636c5e-kube-api-access-r4ngg\") pod \"ovn-controller-stq8b-config-58lgg\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.683743 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.831731 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"bda379e87435243d2a09f8c55d2c6d6f6792bbaeb054dc312d75798ca9c89923"} Dec 03 19:48:29 crc kubenswrapper[4916]: I1203 19:48:29.832033 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"79544e4c4ffa375820ed34ec38bb678b5cd4442979e376f63d9b559607dd0faa"} Dec 03 19:48:30 crc kubenswrapper[4916]: W1203 19:48:30.303308 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod364b12f1_18a0_4586_b1f6_e38c99636c5e.slice/crio-a07d05aa4848e3adee453545376a7395b67a3ab05e2ab2403b2d2acb018d0ab3 WatchSource:0}: Error finding container a07d05aa4848e3adee453545376a7395b67a3ab05e2ab2403b2d2acb018d0ab3: Status 404 returned error can't find the container with id a07d05aa4848e3adee453545376a7395b67a3ab05e2ab2403b2d2acb018d0ab3 Dec 03 19:48:30 crc kubenswrapper[4916]: I1203 19:48:30.304838 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-stq8b-config-58lgg"] Dec 03 19:48:30 crc kubenswrapper[4916]: I1203 19:48:30.488238 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbdb0e7a-46e3-43f2-aa50-0391d6b97554" path="/var/lib/kubelet/pods/fbdb0e7a-46e3-43f2-aa50-0391d6b97554/volumes" Dec 03 19:48:30 crc kubenswrapper[4916]: I1203 19:48:30.841800 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-stq8b-config-58lgg" event={"ID":"364b12f1-18a0-4586-b1f6-e38c99636c5e","Type":"ContainerStarted","Data":"2f8be4f8f2bdc2311e9669b998c36553b0f5983183ef8919b1928aad6c131ad0"} Dec 03 19:48:30 crc kubenswrapper[4916]: I1203 19:48:30.842367 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-stq8b-config-58lgg" event={"ID":"364b12f1-18a0-4586-b1f6-e38c99636c5e","Type":"ContainerStarted","Data":"a07d05aa4848e3adee453545376a7395b67a3ab05e2ab2403b2d2acb018d0ab3"} Dec 03 19:48:30 crc kubenswrapper[4916]: I1203 19:48:30.845092 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"39b8e4f0dc4e3d6818b46689524acbb0c249be5c5d262260840d76b86b69129f"} Dec 03 19:48:30 crc kubenswrapper[4916]: I1203 19:48:30.845124 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"275fe63784132c1358d341f3809a29c2991c62843a564b74f9ecde0179975797"} Dec 03 19:48:30 crc kubenswrapper[4916]: I1203 19:48:30.867016 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-stq8b-config-58lgg" podStartSLOduration=1.8669945970000001 podStartE2EDuration="1.866994597s" podCreationTimestamp="2025-12-03 19:48:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:30.862644551 +0000 UTC m=+1126.825454817" watchObservedRunningTime="2025-12-03 19:48:30.866994597 +0000 UTC m=+1126.829804863" Dec 03 19:48:31 crc kubenswrapper[4916]: I1203 19:48:31.856226 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"5c2f43405bfa8e97e6090c94b057b498e39a208ad321a903079ca79da847b830"} Dec 03 19:48:31 crc kubenswrapper[4916]: I1203 19:48:31.858123 4916 generic.go:334] "Generic (PLEG): container finished" podID="364b12f1-18a0-4586-b1f6-e38c99636c5e" containerID="2f8be4f8f2bdc2311e9669b998c36553b0f5983183ef8919b1928aad6c131ad0" exitCode=0 Dec 03 19:48:31 crc kubenswrapper[4916]: I1203 19:48:31.858167 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-stq8b-config-58lgg" event={"ID":"364b12f1-18a0-4586-b1f6-e38c99636c5e","Type":"ContainerDied","Data":"2f8be4f8f2bdc2311e9669b998c36553b0f5983183ef8919b1928aad6c131ad0"} Dec 03 19:48:32 crc kubenswrapper[4916]: I1203 19:48:32.881381 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"07bd1e12307b0926dc473b1436dbb1440820f4e6fbe1b9f7a819596f4a360a23"} Dec 03 19:48:32 crc kubenswrapper[4916]: I1203 19:48:32.881780 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"2e26e9800dad995aed8cf72c3a98c842d440abe63413d507769df5b66e4003b4"} Dec 03 19:48:32 crc kubenswrapper[4916]: I1203 19:48:32.881796 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"bd93f70b5893dcd845730fa6add2441cdad5efd73b420f898b7ef64381b8ad25"} Dec 03 19:48:32 crc kubenswrapper[4916]: I1203 19:48:32.884612 4916 generic.go:334] "Generic (PLEG): container finished" podID="8c5b4292-d369-4cdf-b1c7-4da7ddf9643d" containerID="4ace83561891847911640b21726dc52ec0f4e20225f05450c0c3bc9fe796129a" exitCode=0 Dec 03 19:48:32 crc kubenswrapper[4916]: I1203 19:48:32.884935 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jn7bl" event={"ID":"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d","Type":"ContainerDied","Data":"4ace83561891847911640b21726dc52ec0f4e20225f05450c0c3bc9fe796129a"} Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.381583 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.496016 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run\") pod \"364b12f1-18a0-4586-b1f6-e38c99636c5e\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.496082 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-additional-scripts\") pod \"364b12f1-18a0-4586-b1f6-e38c99636c5e\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.496110 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run" (OuterVolumeSpecName: "var-run") pod "364b12f1-18a0-4586-b1f6-e38c99636c5e" (UID: "364b12f1-18a0-4586-b1f6-e38c99636c5e"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.496136 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-log-ovn\") pod \"364b12f1-18a0-4586-b1f6-e38c99636c5e\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.496165 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "364b12f1-18a0-4586-b1f6-e38c99636c5e" (UID: "364b12f1-18a0-4586-b1f6-e38c99636c5e"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.496281 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-scripts\") pod \"364b12f1-18a0-4586-b1f6-e38c99636c5e\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.496397 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run-ovn\") pod \"364b12f1-18a0-4586-b1f6-e38c99636c5e\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.496424 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "364b12f1-18a0-4586-b1f6-e38c99636c5e" (UID: "364b12f1-18a0-4586-b1f6-e38c99636c5e"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.496430 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4ngg\" (UniqueName: \"kubernetes.io/projected/364b12f1-18a0-4586-b1f6-e38c99636c5e-kube-api-access-r4ngg\") pod \"364b12f1-18a0-4586-b1f6-e38c99636c5e\" (UID: \"364b12f1-18a0-4586-b1f6-e38c99636c5e\") " Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.496960 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "364b12f1-18a0-4586-b1f6-e38c99636c5e" (UID: "364b12f1-18a0-4586-b1f6-e38c99636c5e"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.497181 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-scripts" (OuterVolumeSpecName: "scripts") pod "364b12f1-18a0-4586-b1f6-e38c99636c5e" (UID: "364b12f1-18a0-4586-b1f6-e38c99636c5e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.497249 4916 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.497265 4916 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-run\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.497275 4916 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.497286 4916 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/364b12f1-18a0-4586-b1f6-e38c99636c5e-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.502293 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/364b12f1-18a0-4586-b1f6-e38c99636c5e-kube-api-access-r4ngg" (OuterVolumeSpecName: "kube-api-access-r4ngg") pod "364b12f1-18a0-4586-b1f6-e38c99636c5e" (UID: "364b12f1-18a0-4586-b1f6-e38c99636c5e"). InnerVolumeSpecName "kube-api-access-r4ngg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.598993 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4ngg\" (UniqueName: \"kubernetes.io/projected/364b12f1-18a0-4586-b1f6-e38c99636c5e-kube-api-access-r4ngg\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.599044 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/364b12f1-18a0-4586-b1f6-e38c99636c5e-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.908020 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"50337df3f96959da8ebfb981b43ecbe92c0c52c27a674a565f82154e710963bf"} Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.908429 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"76ef3a66c13a67f8070819076c382954d099156884f0a920c8bdf44bbbe21927"} Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.911548 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-stq8b-config-58lgg" event={"ID":"364b12f1-18a0-4586-b1f6-e38c99636c5e","Type":"ContainerDied","Data":"a07d05aa4848e3adee453545376a7395b67a3ab05e2ab2403b2d2acb018d0ab3"} Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.911602 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-stq8b-config-58lgg" Dec 03 19:48:33 crc kubenswrapper[4916]: I1203 19:48:33.911609 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a07d05aa4848e3adee453545376a7395b67a3ab05e2ab2403b2d2acb018d0ab3" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.451203 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-stq8b-config-58lgg"] Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.455930 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.463426 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-stq8b-config-58lgg"] Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.491818 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="364b12f1-18a0-4586-b1f6-e38c99636c5e" path="/var/lib/kubelet/pods/364b12f1-18a0-4586-b1f6-e38c99636c5e/volumes" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.512337 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-combined-ca-bundle\") pod \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.512390 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-config-data\") pod \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.512468 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-db-sync-config-data\") pod \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.512533 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nrmk\" (UniqueName: \"kubernetes.io/projected/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-kube-api-access-5nrmk\") pod \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\" (UID: \"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d\") " Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.532752 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-kube-api-access-5nrmk" (OuterVolumeSpecName: "kube-api-access-5nrmk") pod "8c5b4292-d369-4cdf-b1c7-4da7ddf9643d" (UID: "8c5b4292-d369-4cdf-b1c7-4da7ddf9643d"). InnerVolumeSpecName "kube-api-access-5nrmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.557631 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "8c5b4292-d369-4cdf-b1c7-4da7ddf9643d" (UID: "8c5b4292-d369-4cdf-b1c7-4da7ddf9643d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.561104 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c5b4292-d369-4cdf-b1c7-4da7ddf9643d" (UID: "8c5b4292-d369-4cdf-b1c7-4da7ddf9643d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.580942 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-config-data" (OuterVolumeSpecName: "config-data") pod "8c5b4292-d369-4cdf-b1c7-4da7ddf9643d" (UID: "8c5b4292-d369-4cdf-b1c7-4da7ddf9643d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.613787 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nrmk\" (UniqueName: \"kubernetes.io/projected/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-kube-api-access-5nrmk\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.614069 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.614182 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.614256 4916 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.926896 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jn7bl" event={"ID":"8c5b4292-d369-4cdf-b1c7-4da7ddf9643d","Type":"ContainerDied","Data":"10dcbe3db4d910777666e16c589ec2a90aac2461ffb0a4ce8daafdf6434cef5d"} Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.926944 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-jn7bl" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.926960 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10dcbe3db4d910777666e16c589ec2a90aac2461ffb0a4ce8daafdf6434cef5d" Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.934874 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"a991d2ec2d0ebb0b936a2fc210bb7c74152bb2f962b393c550ea3a50137e33e6"} Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.934926 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"327e344d9ebb160977ccb8ad5e05ae162233ce141d39771ade7af586db05809a"} Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.934942 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"f08047a31393ea6ad2e5fa6cfb4b67f50aa5f5d63ed70f26bbe20c40ceb4b91d"} Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.934953 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"76a6b1027488e3fd331f084707796e68fc6b3e436a3c0135c54429d89ee39d14"} Dec 03 19:48:34 crc kubenswrapper[4916]: I1203 19:48:34.934964 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5","Type":"ContainerStarted","Data":"d9cb9b5658e4331996984100b680f9d7368377baa1de3c5c07f7aefb14ded750"} Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:34.997160 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.111993863 podStartE2EDuration="40.99713986s" podCreationTimestamp="2025-12-03 19:47:54 +0000 UTC" firstStartedPulling="2025-12-03 19:48:28.395133273 +0000 UTC m=+1124.357943539" lastFinishedPulling="2025-12-03 19:48:33.28027928 +0000 UTC m=+1129.243089536" observedRunningTime="2025-12-03 19:48:34.986211299 +0000 UTC m=+1130.949021565" watchObservedRunningTime="2025-12-03 19:48:34.99713986 +0000 UTC m=+1130.959950136" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.291410 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b946c75cc-frxvr"] Dec 03 19:48:35 crc kubenswrapper[4916]: E1203 19:48:35.291776 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="364b12f1-18a0-4586-b1f6-e38c99636c5e" containerName="ovn-config" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.291792 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="364b12f1-18a0-4586-b1f6-e38c99636c5e" containerName="ovn-config" Dec 03 19:48:35 crc kubenswrapper[4916]: E1203 19:48:35.291810 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c5b4292-d369-4cdf-b1c7-4da7ddf9643d" containerName="glance-db-sync" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.291816 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c5b4292-d369-4cdf-b1c7-4da7ddf9643d" containerName="glance-db-sync" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.291955 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="364b12f1-18a0-4586-b1f6-e38c99636c5e" containerName="ovn-config" Dec 03 
19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.291983 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c5b4292-d369-4cdf-b1c7-4da7ddf9643d" containerName="glance-db-sync" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.292761 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.318046 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b946c75cc-frxvr"] Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.323031 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-nb\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.323086 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2dbs\" (UniqueName: \"kubernetes.io/projected/ffc697c0-8d8a-4df4-b97b-dafe21596244-kube-api-access-g2dbs\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.323116 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-config\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.323140 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-dns-svc\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.323168 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-sb\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.415839 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b946c75cc-frxvr"] Dec 03 19:48:35 crc kubenswrapper[4916]: E1203 19:48:35.416587 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc kube-api-access-g2dbs ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" podUID="ffc697c0-8d8a-4df4-b97b-dafe21596244" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.428008 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-nb\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 
19:48:35.428073 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2dbs\" (UniqueName: \"kubernetes.io/projected/ffc697c0-8d8a-4df4-b97b-dafe21596244-kube-api-access-g2dbs\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.428301 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-config\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.428325 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-dns-svc\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.428341 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-sb\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.429186 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-sb\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.429843 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-nb\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.430448 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-config\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.430579 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-dns-svc\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.443195 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jtfbw"] Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.444556 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.447700 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.456857 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2dbs\" (UniqueName: \"kubernetes.io/projected/ffc697c0-8d8a-4df4-b97b-dafe21596244-kube-api-access-g2dbs\") pod \"dnsmasq-dns-5b946c75cc-frxvr\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.476158 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jtfbw"] Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.529864 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.530085 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.530175 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-config\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.530363 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpsqz\" (UniqueName: \"kubernetes.io/projected/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-kube-api-access-bpsqz\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.530421 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.530464 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.632581 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpsqz\" (UniqueName: \"kubernetes.io/projected/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-kube-api-access-bpsqz\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: 
\"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.632653 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.632685 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.632716 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.632809 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.632844 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-config\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.633645 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-sb\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.633814 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-nb\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.633818 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-svc\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.633894 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-swift-storage-0\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc 
kubenswrapper[4916]: I1203 19:48:35.634188 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-config\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.650537 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpsqz\" (UniqueName: \"kubernetes.io/projected/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-kube-api-access-bpsqz\") pod \"dnsmasq-dns-74f6bcbc87-jtfbw\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") " pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.805091 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.950153 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:35 crc kubenswrapper[4916]: I1203 19:48:35.969744 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.045306 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jtfbw"] Dec 03 19:48:36 crc kubenswrapper[4916]: W1203 19:48:36.054437 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8fb5ee1_e661_4656_94c6_2dcac6ce93b6.slice/crio-fd9e312195bfbeee4a7090dbc6a955453163e6acd1ba7c400808ecb513ad8c18 WatchSource:0}: Error finding container fd9e312195bfbeee4a7090dbc6a955453163e6acd1ba7c400808ecb513ad8c18: Status 404 returned error can't find the container with id fd9e312195bfbeee4a7090dbc6a955453163e6acd1ba7c400808ecb513ad8c18 Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.141158 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-config\") pod \"ffc697c0-8d8a-4df4-b97b-dafe21596244\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.141218 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-nb\") pod \"ffc697c0-8d8a-4df4-b97b-dafe21596244\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.141249 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2dbs\" (UniqueName: \"kubernetes.io/projected/ffc697c0-8d8a-4df4-b97b-dafe21596244-kube-api-access-g2dbs\") pod \"ffc697c0-8d8a-4df4-b97b-dafe21596244\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.141303 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-sb\") pod \"ffc697c0-8d8a-4df4-b97b-dafe21596244\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.141501 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-dns-svc\") pod \"ffc697c0-8d8a-4df4-b97b-dafe21596244\" (UID: \"ffc697c0-8d8a-4df4-b97b-dafe21596244\") " Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.141756 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-config" (OuterVolumeSpecName: "config") pod "ffc697c0-8d8a-4df4-b97b-dafe21596244" (UID: "ffc697c0-8d8a-4df4-b97b-dafe21596244"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.142184 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ffc697c0-8d8a-4df4-b97b-dafe21596244" (UID: "ffc697c0-8d8a-4df4-b97b-dafe21596244"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.142207 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ffc697c0-8d8a-4df4-b97b-dafe21596244" (UID: "ffc697c0-8d8a-4df4-b97b-dafe21596244"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.142732 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.142826 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ffc697c0-8d8a-4df4-b97b-dafe21596244" (UID: "ffc697c0-8d8a-4df4-b97b-dafe21596244"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.142994 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.143017 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.145158 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffc697c0-8d8a-4df4-b97b-dafe21596244-kube-api-access-g2dbs" (OuterVolumeSpecName: "kube-api-access-g2dbs") pod "ffc697c0-8d8a-4df4-b97b-dafe21596244" (UID: "ffc697c0-8d8a-4df4-b97b-dafe21596244"). InnerVolumeSpecName "kube-api-access-g2dbs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.244872 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2dbs\" (UniqueName: \"kubernetes.io/projected/ffc697c0-8d8a-4df4-b97b-dafe21596244-kube-api-access-g2dbs\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.244904 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ffc697c0-8d8a-4df4-b97b-dafe21596244-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.959396 4916 generic.go:334] "Generic (PLEG): container finished" podID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerID="a02769c5632c765d76c1ed1f73faf7b1ed75c71fe378ab81a2a7a4b906b55fcc" exitCode=0 Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.959452 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" event={"ID":"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6","Type":"ContainerDied","Data":"a02769c5632c765d76c1ed1f73faf7b1ed75c71fe378ab81a2a7a4b906b55fcc"} Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.959808 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" event={"ID":"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6","Type":"ContainerStarted","Data":"fd9e312195bfbeee4a7090dbc6a955453163e6acd1ba7c400808ecb513ad8c18"} Dec 03 19:48:36 crc kubenswrapper[4916]: I1203 19:48:36.959746 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b946c75cc-frxvr" Dec 03 19:48:37 crc kubenswrapper[4916]: I1203 19:48:37.040631 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b946c75cc-frxvr"] Dec 03 19:48:37 crc kubenswrapper[4916]: I1203 19:48:37.044799 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b946c75cc-frxvr"] Dec 03 19:48:37 crc kubenswrapper[4916]: I1203 19:48:37.974159 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" event={"ID":"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6","Type":"ContainerStarted","Data":"250f60d34758f570cddca0f5143b1c42ac04c018bc251bbddd6f7a39d6bd4506"} Dec 03 19:48:37 crc kubenswrapper[4916]: I1203 19:48:37.975500 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:38 crc kubenswrapper[4916]: I1203 19:48:38.002705 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" podStartSLOduration=3.002685066 podStartE2EDuration="3.002685066s" podCreationTimestamp="2025-12-03 19:48:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:37.995971897 +0000 UTC m=+1133.958782173" watchObservedRunningTime="2025-12-03 19:48:38.002685066 +0000 UTC m=+1133.965495342" Dec 03 19:48:38 crc kubenswrapper[4916]: I1203 19:48:38.496749 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffc697c0-8d8a-4df4-b97b-dafe21596244" path="/var/lib/kubelet/pods/ffc697c0-8d8a-4df4-b97b-dafe21596244/volumes" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.210258 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.547626 4916 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-d2ls9"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.548644 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-d2ls9" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.560887 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-d2ls9"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.618411 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-267xc\" (UniqueName: \"kubernetes.io/projected/97246a98-afa5-477d-9528-19c6fd55a094-kube-api-access-267xc\") pod \"barbican-db-create-d2ls9\" (UID: \"97246a98-afa5-477d-9528-19c6fd55a094\") " pod="openstack/barbican-db-create-d2ls9" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.618465 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97246a98-afa5-477d-9528-19c6fd55a094-operator-scripts\") pod \"barbican-db-create-d2ls9\" (UID: \"97246a98-afa5-477d-9528-19c6fd55a094\") " pod="openstack/barbican-db-create-d2ls9" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.665382 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-j5b5c"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.666493 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-j5b5c" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.684026 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-b86a-account-create-update-zjg4j"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.685094 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-b86a-account-create-update-zjg4j" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.687554 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.709779 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-j5b5c"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.716357 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-b86a-account-create-update-zjg4j"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.719495 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97246a98-afa5-477d-9528-19c6fd55a094-operator-scripts\") pod \"barbican-db-create-d2ls9\" (UID: \"97246a98-afa5-477d-9528-19c6fd55a094\") " pod="openstack/barbican-db-create-d2ls9" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.719584 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5djr\" (UniqueName: \"kubernetes.io/projected/321a3852-3277-44d8-a126-a8549e29d224-kube-api-access-q5djr\") pod \"cinder-b86a-account-create-update-zjg4j\" (UID: \"321a3852-3277-44d8-a126-a8549e29d224\") " pod="openstack/cinder-b86a-account-create-update-zjg4j" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.719641 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9622e2e-7d39-4ec8-b6fc-580eee868216-operator-scripts\") pod \"heat-db-create-j5b5c\" (UID: \"d9622e2e-7d39-4ec8-b6fc-580eee868216\") " pod="openstack/heat-db-create-j5b5c" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.719673 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8m8n\" (UniqueName: \"kubernetes.io/projected/d9622e2e-7d39-4ec8-b6fc-580eee868216-kube-api-access-p8m8n\") pod \"heat-db-create-j5b5c\" (UID: \"d9622e2e-7d39-4ec8-b6fc-580eee868216\") " pod="openstack/heat-db-create-j5b5c" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.719692 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-267xc\" (UniqueName: \"kubernetes.io/projected/97246a98-afa5-477d-9528-19c6fd55a094-kube-api-access-267xc\") pod \"barbican-db-create-d2ls9\" (UID: \"97246a98-afa5-477d-9528-19c6fd55a094\") " pod="openstack/barbican-db-create-d2ls9" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.719714 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/321a3852-3277-44d8-a126-a8549e29d224-operator-scripts\") pod \"cinder-b86a-account-create-update-zjg4j\" (UID: \"321a3852-3277-44d8-a126-a8549e29d224\") " pod="openstack/cinder-b86a-account-create-update-zjg4j" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.720400 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97246a98-afa5-477d-9528-19c6fd55a094-operator-scripts\") pod \"barbican-db-create-d2ls9\" (UID: \"97246a98-afa5-477d-9528-19c6fd55a094\") " pod="openstack/barbican-db-create-d2ls9" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.747267 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-267xc\" (UniqueName: \"kubernetes.io/projected/97246a98-afa5-477d-9528-19c6fd55a094-kube-api-access-267xc\") pod \"barbican-db-create-d2ls9\" (UID: \"97246a98-afa5-477d-9528-19c6fd55a094\") " pod="openstack/barbican-db-create-d2ls9" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.768633 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-d242-account-create-update-nl9k8"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.769663 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-d242-account-create-update-nl9k8" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.771955 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.780520 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-d242-account-create-update-nl9k8"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.822242 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5djr\" (UniqueName: \"kubernetes.io/projected/321a3852-3277-44d8-a126-a8549e29d224-kube-api-access-q5djr\") pod \"cinder-b86a-account-create-update-zjg4j\" (UID: \"321a3852-3277-44d8-a126-a8549e29d224\") " pod="openstack/cinder-b86a-account-create-update-zjg4j" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.822324 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9622e2e-7d39-4ec8-b6fc-580eee868216-operator-scripts\") pod \"heat-db-create-j5b5c\" (UID: \"d9622e2e-7d39-4ec8-b6fc-580eee868216\") " pod="openstack/heat-db-create-j5b5c" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.822363 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8m8n\" (UniqueName: \"kubernetes.io/projected/d9622e2e-7d39-4ec8-b6fc-580eee868216-kube-api-access-p8m8n\") pod \"heat-db-create-j5b5c\" (UID: \"d9622e2e-7d39-4ec8-b6fc-580eee868216\") " pod="openstack/heat-db-create-j5b5c" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.822383 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-operator-scripts\") pod \"heat-d242-account-create-update-nl9k8\" (UID: \"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca\") " pod="openstack/heat-d242-account-create-update-nl9k8" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.822403 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvqsx\" (UniqueName: \"kubernetes.io/projected/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-kube-api-access-kvqsx\") pod \"heat-d242-account-create-update-nl9k8\" (UID: \"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca\") " pod="openstack/heat-d242-account-create-update-nl9k8" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.822426 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/321a3852-3277-44d8-a126-a8549e29d224-operator-scripts\") pod \"cinder-b86a-account-create-update-zjg4j\" (UID: \"321a3852-3277-44d8-a126-a8549e29d224\") " pod="openstack/cinder-b86a-account-create-update-zjg4j" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.823132 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/321a3852-3277-44d8-a126-a8549e29d224-operator-scripts\") pod \"cinder-b86a-account-create-update-zjg4j\" (UID: \"321a3852-3277-44d8-a126-a8549e29d224\") " pod="openstack/cinder-b86a-account-create-update-zjg4j" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.823431 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9622e2e-7d39-4ec8-b6fc-580eee868216-operator-scripts\") pod \"heat-db-create-j5b5c\" (UID: \"d9622e2e-7d39-4ec8-b6fc-580eee868216\") " pod="openstack/heat-db-create-j5b5c" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.842265 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5djr\" (UniqueName: \"kubernetes.io/projected/321a3852-3277-44d8-a126-a8549e29d224-kube-api-access-q5djr\") pod \"cinder-b86a-account-create-update-zjg4j\" (UID: \"321a3852-3277-44d8-a126-a8549e29d224\") " pod="openstack/cinder-b86a-account-create-update-zjg4j" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.846230 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8m8n\" (UniqueName: \"kubernetes.io/projected/d9622e2e-7d39-4ec8-b6fc-580eee868216-kube-api-access-p8m8n\") pod \"heat-db-create-j5b5c\" (UID: \"d9622e2e-7d39-4ec8-b6fc-580eee868216\") " pod="openstack/heat-db-create-j5b5c" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.857874 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8e13-account-create-update-ztj9z"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.858805 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8e13-account-create-update-ztj9z" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.860951 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.867612 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8e13-account-create-update-ztj9z"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.870944 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-d2ls9" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.923949 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a211a30-fc89-4c07-afaf-e269d5ba2295-operator-scripts\") pod \"barbican-8e13-account-create-update-ztj9z\" (UID: \"8a211a30-fc89-4c07-afaf-e269d5ba2295\") " pod="openstack/barbican-8e13-account-create-update-ztj9z" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.924334 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfq9z\" (UniqueName: \"kubernetes.io/projected/8a211a30-fc89-4c07-afaf-e269d5ba2295-kube-api-access-pfq9z\") pod \"barbican-8e13-account-create-update-ztj9z\" (UID: \"8a211a30-fc89-4c07-afaf-e269d5ba2295\") " pod="openstack/barbican-8e13-account-create-update-ztj9z" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.924401 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-operator-scripts\") pod \"heat-d242-account-create-update-nl9k8\" (UID: \"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca\") " pod="openstack/heat-d242-account-create-update-nl9k8" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.924421 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvqsx\" (UniqueName: \"kubernetes.io/projected/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-kube-api-access-kvqsx\") pod \"heat-d242-account-create-update-nl9k8\" (UID: \"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca\") " pod="openstack/heat-d242-account-create-update-nl9k8" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.925439 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-operator-scripts\") pod \"heat-d242-account-create-update-nl9k8\" (UID: \"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca\") " pod="openstack/heat-d242-account-create-update-nl9k8" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.955110 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvqsx\" (UniqueName: \"kubernetes.io/projected/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-kube-api-access-kvqsx\") pod \"heat-d242-account-create-update-nl9k8\" (UID: \"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca\") " pod="openstack/heat-d242-account-create-update-nl9k8" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.960467 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-kf9l9"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.961539 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-kf9l9" Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.969971 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-kf9l9"] Dec 03 19:48:39 crc kubenswrapper[4916]: I1203 19:48:39.979939 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-j5b5c" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.006053 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-b86a-account-create-update-zjg4j" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.026460 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfq9z\" (UniqueName: \"kubernetes.io/projected/8a211a30-fc89-4c07-afaf-e269d5ba2295-kube-api-access-pfq9z\") pod \"barbican-8e13-account-create-update-ztj9z\" (UID: \"8a211a30-fc89-4c07-afaf-e269d5ba2295\") " pod="openstack/barbican-8e13-account-create-update-ztj9z" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.026593 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a211a30-fc89-4c07-afaf-e269d5ba2295-operator-scripts\") pod \"barbican-8e13-account-create-update-ztj9z\" (UID: \"8a211a30-fc89-4c07-afaf-e269d5ba2295\") " pod="openstack/barbican-8e13-account-create-update-ztj9z" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.027326 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a211a30-fc89-4c07-afaf-e269d5ba2295-operator-scripts\") pod \"barbican-8e13-account-create-update-ztj9z\" (UID: \"8a211a30-fc89-4c07-afaf-e269d5ba2295\") " pod="openstack/barbican-8e13-account-create-update-ztj9z" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.043457 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-vzcmc"] Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.065323 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.067857 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.068054 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.071618 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfq9z\" (UniqueName: \"kubernetes.io/projected/8a211a30-fc89-4c07-afaf-e269d5ba2295-kube-api-access-pfq9z\") pod \"barbican-8e13-account-create-update-ztj9z\" (UID: \"8a211a30-fc89-4c07-afaf-e269d5ba2295\") " pod="openstack/barbican-8e13-account-create-update-ztj9z" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.071773 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.072027 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hvlw8" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.079014 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8e13-account-create-update-ztj9z" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.097383 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-d242-account-create-update-nl9k8" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.101636 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-8wqnf"] Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.103454 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-8wqnf" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.109777 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-vzcmc"] Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.119814 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-1496-account-create-update-zgm7f"] Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.121161 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1496-account-create-update-zgm7f" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.124975 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.127533 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-8wqnf"] Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.128700 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ef23599-1005-496a-a421-e3f4300e8b5a-operator-scripts\") pod \"cinder-db-create-kf9l9\" (UID: \"2ef23599-1005-496a-a421-e3f4300e8b5a\") " pod="openstack/cinder-db-create-kf9l9" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.128864 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dljv\" (UniqueName: \"kubernetes.io/projected/2ef23599-1005-496a-a421-e3f4300e8b5a-kube-api-access-5dljv\") pod \"cinder-db-create-kf9l9\" (UID: \"2ef23599-1005-496a-a421-e3f4300e8b5a\") " pod="openstack/cinder-db-create-kf9l9" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.132692 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-1496-account-create-update-zgm7f"] Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.230481 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqkfg\" (UniqueName: \"kubernetes.io/projected/a17100cf-50d9-4a21-8d10-b1e49808fe53-kube-api-access-jqkfg\") pod \"neutron-db-create-8wqnf\" (UID: \"a17100cf-50d9-4a21-8d10-b1e49808fe53\") " pod="openstack/neutron-db-create-8wqnf" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.230536 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ef23599-1005-496a-a421-e3f4300e8b5a-operator-scripts\") pod \"cinder-db-create-kf9l9\" (UID: \"2ef23599-1005-496a-a421-e3f4300e8b5a\") " pod="openstack/cinder-db-create-kf9l9" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.230587 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-config-data\") pod \"keystone-db-sync-vzcmc\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.230618 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdpbd\" (UniqueName: \"kubernetes.io/projected/22a8aa59-7a7f-44f7-b766-2f1648211423-kube-api-access-kdpbd\") pod \"neutron-1496-account-create-update-zgm7f\" (UID: \"22a8aa59-7a7f-44f7-b766-2f1648211423\") " pod="openstack/neutron-1496-account-create-update-zgm7f" Dec 03 19:48:40 crc 
kubenswrapper[4916]: I1203 19:48:40.230637 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-combined-ca-bundle\") pod \"keystone-db-sync-vzcmc\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.230683 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a17100cf-50d9-4a21-8d10-b1e49808fe53-operator-scripts\") pod \"neutron-db-create-8wqnf\" (UID: \"a17100cf-50d9-4a21-8d10-b1e49808fe53\") " pod="openstack/neutron-db-create-8wqnf" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.230714 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dljv\" (UniqueName: \"kubernetes.io/projected/2ef23599-1005-496a-a421-e3f4300e8b5a-kube-api-access-5dljv\") pod \"cinder-db-create-kf9l9\" (UID: \"2ef23599-1005-496a-a421-e3f4300e8b5a\") " pod="openstack/cinder-db-create-kf9l9" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.230748 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22a8aa59-7a7f-44f7-b766-2f1648211423-operator-scripts\") pod \"neutron-1496-account-create-update-zgm7f\" (UID: \"22a8aa59-7a7f-44f7-b766-2f1648211423\") " pod="openstack/neutron-1496-account-create-update-zgm7f" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.230789 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xxgb\" (UniqueName: \"kubernetes.io/projected/94bf33bd-9ef5-41b9-820b-63fd78b3a384-kube-api-access-7xxgb\") pod \"keystone-db-sync-vzcmc\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.231385 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ef23599-1005-496a-a421-e3f4300e8b5a-operator-scripts\") pod \"cinder-db-create-kf9l9\" (UID: \"2ef23599-1005-496a-a421-e3f4300e8b5a\") " pod="openstack/cinder-db-create-kf9l9" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.277074 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dljv\" (UniqueName: \"kubernetes.io/projected/2ef23599-1005-496a-a421-e3f4300e8b5a-kube-api-access-5dljv\") pod \"cinder-db-create-kf9l9\" (UID: \"2ef23599-1005-496a-a421-e3f4300e8b5a\") " pod="openstack/cinder-db-create-kf9l9" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.341683 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a17100cf-50d9-4a21-8d10-b1e49808fe53-operator-scripts\") pod \"neutron-db-create-8wqnf\" (UID: \"a17100cf-50d9-4a21-8d10-b1e49808fe53\") " pod="openstack/neutron-db-create-8wqnf" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.341779 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22a8aa59-7a7f-44f7-b766-2f1648211423-operator-scripts\") pod \"neutron-1496-account-create-update-zgm7f\" (UID: \"22a8aa59-7a7f-44f7-b766-2f1648211423\") " 
pod="openstack/neutron-1496-account-create-update-zgm7f" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.341842 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xxgb\" (UniqueName: \"kubernetes.io/projected/94bf33bd-9ef5-41b9-820b-63fd78b3a384-kube-api-access-7xxgb\") pod \"keystone-db-sync-vzcmc\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.341869 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqkfg\" (UniqueName: \"kubernetes.io/projected/a17100cf-50d9-4a21-8d10-b1e49808fe53-kube-api-access-jqkfg\") pod \"neutron-db-create-8wqnf\" (UID: \"a17100cf-50d9-4a21-8d10-b1e49808fe53\") " pod="openstack/neutron-db-create-8wqnf" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.341912 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-config-data\") pod \"keystone-db-sync-vzcmc\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.341957 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdpbd\" (UniqueName: \"kubernetes.io/projected/22a8aa59-7a7f-44f7-b766-2f1648211423-kube-api-access-kdpbd\") pod \"neutron-1496-account-create-update-zgm7f\" (UID: \"22a8aa59-7a7f-44f7-b766-2f1648211423\") " pod="openstack/neutron-1496-account-create-update-zgm7f" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.341991 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-combined-ca-bundle\") pod \"keystone-db-sync-vzcmc\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.343843 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a17100cf-50d9-4a21-8d10-b1e49808fe53-operator-scripts\") pod \"neutron-db-create-8wqnf\" (UID: \"a17100cf-50d9-4a21-8d10-b1e49808fe53\") " pod="openstack/neutron-db-create-8wqnf" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.344476 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22a8aa59-7a7f-44f7-b766-2f1648211423-operator-scripts\") pod \"neutron-1496-account-create-update-zgm7f\" (UID: \"22a8aa59-7a7f-44f7-b766-2f1648211423\") " pod="openstack/neutron-1496-account-create-update-zgm7f" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.349509 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-combined-ca-bundle\") pod \"keystone-db-sync-vzcmc\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.377607 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-config-data\") pod \"keystone-db-sync-vzcmc\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:40 
crc kubenswrapper[4916]: I1203 19:48:40.380696 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqkfg\" (UniqueName: \"kubernetes.io/projected/a17100cf-50d9-4a21-8d10-b1e49808fe53-kube-api-access-jqkfg\") pod \"neutron-db-create-8wqnf\" (UID: \"a17100cf-50d9-4a21-8d10-b1e49808fe53\") " pod="openstack/neutron-db-create-8wqnf" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.383192 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdpbd\" (UniqueName: \"kubernetes.io/projected/22a8aa59-7a7f-44f7-b766-2f1648211423-kube-api-access-kdpbd\") pod \"neutron-1496-account-create-update-zgm7f\" (UID: \"22a8aa59-7a7f-44f7-b766-2f1648211423\") " pod="openstack/neutron-1496-account-create-update-zgm7f" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.391746 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xxgb\" (UniqueName: \"kubernetes.io/projected/94bf33bd-9ef5-41b9-820b-63fd78b3a384-kube-api-access-7xxgb\") pod \"keystone-db-sync-vzcmc\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.410949 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-kf9l9" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.417990 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.440315 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8wqnf" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.460928 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-b86a-account-create-update-zjg4j"] Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.462004 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-1496-account-create-update-zgm7f" Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.497548 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-d2ls9"] Dec 03 19:48:40 crc kubenswrapper[4916]: W1203 19:48:40.536779 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97246a98_afa5_477d_9528_19c6fd55a094.slice/crio-b29b46194808426604a04fdf5e271bc2acf6588b5fbc40f74d216489a0b9c3a8 WatchSource:0}: Error finding container b29b46194808426604a04fdf5e271bc2acf6588b5fbc40f74d216489a0b9c3a8: Status 404 returned error can't find the container with id b29b46194808426604a04fdf5e271bc2acf6588b5fbc40f74d216489a0b9c3a8 Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.541346 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-j5b5c"] Dec 03 19:48:40 crc kubenswrapper[4916]: W1203 19:48:40.871056 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a211a30_fc89_4c07_afaf_e269d5ba2295.slice/crio-e93cab5e40bca777e877e98d7679c5c1f24803a08af33d7b50faf4ab3a8ec766 WatchSource:0}: Error finding container e93cab5e40bca777e877e98d7679c5c1f24803a08af33d7b50faf4ab3a8ec766: Status 404 returned error can't find the container with id e93cab5e40bca777e877e98d7679c5c1f24803a08af33d7b50faf4ab3a8ec766 Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.874657 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8e13-account-create-update-ztj9z"] Dec 03 19:48:40 crc kubenswrapper[4916]: I1203 19:48:40.896248 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-d242-account-create-update-nl9k8"] Dec 03 19:48:41 crc kubenswrapper[4916]: I1203 19:48:41.012687 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-b86a-account-create-update-zjg4j" event={"ID":"321a3852-3277-44d8-a126-a8549e29d224","Type":"ContainerStarted","Data":"ef152848f56bf90a16191e044aeb50b166b37181428fd6649b752e6eb7f7c48b"} Dec 03 19:48:41 crc kubenswrapper[4916]: I1203 19:48:41.013592 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-j5b5c" event={"ID":"d9622e2e-7d39-4ec8-b6fc-580eee868216","Type":"ContainerStarted","Data":"520ce2cdab25f9123bc0e32ffe8f0ba0c4aade7e12fcac69d334c5637cde7ac9"} Dec 03 19:48:41 crc kubenswrapper[4916]: I1203 19:48:41.015267 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-d2ls9" event={"ID":"97246a98-afa5-477d-9528-19c6fd55a094","Type":"ContainerStarted","Data":"b29b46194808426604a04fdf5e271bc2acf6588b5fbc40f74d216489a0b9c3a8"} Dec 03 19:48:41 crc kubenswrapper[4916]: I1203 19:48:41.023832 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-d242-account-create-update-nl9k8" event={"ID":"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca","Type":"ContainerStarted","Data":"74606ce33185b932df87d5dc1f69bf54b37d6c1f3aa78e6f8d65c1436ae954ad"} Dec 03 19:48:41 crc kubenswrapper[4916]: I1203 19:48:41.033327 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8e13-account-create-update-ztj9z" event={"ID":"8a211a30-fc89-4c07-afaf-e269d5ba2295","Type":"ContainerStarted","Data":"e93cab5e40bca777e877e98d7679c5c1f24803a08af33d7b50faf4ab3a8ec766"} Dec 03 19:48:41 crc kubenswrapper[4916]: I1203 19:48:41.230213 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/keystone-db-sync-vzcmc"] Dec 03 19:48:41 crc kubenswrapper[4916]: I1203 19:48:41.247656 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-kf9l9"] Dec 03 19:48:41 crc kubenswrapper[4916]: I1203 19:48:41.255686 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-1496-account-create-update-zgm7f"] Dec 03 19:48:41 crc kubenswrapper[4916]: W1203 19:48:41.264592 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ef23599_1005_496a_a421_e3f4300e8b5a.slice/crio-77c19e92ad7b158fd3f25637af577899ab2cd036b27a8e9a9dde7cb0273e64a6 WatchSource:0}: Error finding container 77c19e92ad7b158fd3f25637af577899ab2cd036b27a8e9a9dde7cb0273e64a6: Status 404 returned error can't find the container with id 77c19e92ad7b158fd3f25637af577899ab2cd036b27a8e9a9dde7cb0273e64a6 Dec 03 19:48:41 crc kubenswrapper[4916]: W1203 19:48:41.280532 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22a8aa59_7a7f_44f7_b766_2f1648211423.slice/crio-a84545fabba5cbfbdb934dfd792764ae29b60e31d2219651c4753fa3ad5e0fa4 WatchSource:0}: Error finding container a84545fabba5cbfbdb934dfd792764ae29b60e31d2219651c4753fa3ad5e0fa4: Status 404 returned error can't find the container with id a84545fabba5cbfbdb934dfd792764ae29b60e31d2219651c4753fa3ad5e0fa4 Dec 03 19:48:41 crc kubenswrapper[4916]: I1203 19:48:41.406910 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-8wqnf"] Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.042778 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8e13-account-create-update-ztj9z" event={"ID":"8a211a30-fc89-4c07-afaf-e269d5ba2295","Type":"ContainerStarted","Data":"03dedab6881211d06323fe2a0f89529ee55f7ce2774f2e20f01d55e629d98895"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.044517 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-j5b5c" event={"ID":"d9622e2e-7d39-4ec8-b6fc-580eee868216","Type":"ContainerStarted","Data":"1d55eef93911096e6d3efc48bf0bccf325e4086b99630d17caf739f48d45e7b3"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.046877 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1496-account-create-update-zgm7f" event={"ID":"22a8aa59-7a7f-44f7-b766-2f1648211423","Type":"ContainerStarted","Data":"845fa2c517ce949fb569fa0b08ea12fa5c8b19ea64af16121965f23ae2b1f734"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.046980 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1496-account-create-update-zgm7f" event={"ID":"22a8aa59-7a7f-44f7-b766-2f1648211423","Type":"ContainerStarted","Data":"a84545fabba5cbfbdb934dfd792764ae29b60e31d2219651c4753fa3ad5e0fa4"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.051210 4916 generic.go:334] "Generic (PLEG): container finished" podID="97246a98-afa5-477d-9528-19c6fd55a094" containerID="20268ae426aac796e7f96a8d537347861fb149c9a7ff8e5f5579ffdc9d0f8b32" exitCode=0 Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.051347 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-d2ls9" event={"ID":"97246a98-afa5-477d-9528-19c6fd55a094","Type":"ContainerDied","Data":"20268ae426aac796e7f96a8d537347861fb149c9a7ff8e5f5579ffdc9d0f8b32"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.052414 4916 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vzcmc" event={"ID":"94bf33bd-9ef5-41b9-820b-63fd78b3a384","Type":"ContainerStarted","Data":"1c00d5546b0f7ad2fcd43fe243f65498a97022e76d11f36ac02817d9a0821d15"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.053790 4916 generic.go:334] "Generic (PLEG): container finished" podID="2ef23599-1005-496a-a421-e3f4300e8b5a" containerID="3e88fafc9520cb9cb1417d38f0f7fd7eeb8b2f6da42e889aae28e92b2936dad2" exitCode=0 Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.053898 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-kf9l9" event={"ID":"2ef23599-1005-496a-a421-e3f4300e8b5a","Type":"ContainerDied","Data":"3e88fafc9520cb9cb1417d38f0f7fd7eeb8b2f6da42e889aae28e92b2936dad2"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.053985 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-kf9l9" event={"ID":"2ef23599-1005-496a-a421-e3f4300e8b5a","Type":"ContainerStarted","Data":"77c19e92ad7b158fd3f25637af577899ab2cd036b27a8e9a9dde7cb0273e64a6"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.058076 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8wqnf" event={"ID":"a17100cf-50d9-4a21-8d10-b1e49808fe53","Type":"ContainerStarted","Data":"29fc3d06133cde71b2a9f2376c331c9a8355540d0837281afd8c8404a15199eb"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.058208 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8wqnf" event={"ID":"a17100cf-50d9-4a21-8d10-b1e49808fe53","Type":"ContainerStarted","Data":"0931baa8913fd37ccff7740039243143e118cc45d2fd5433827f9b896836ae53"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.059773 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-d242-account-create-update-nl9k8" event={"ID":"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca","Type":"ContainerStarted","Data":"72c2b057142a7c630bdcfa408aaca20533cfe49d30e66cb39f40bde8787e777e"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.064684 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-8e13-account-create-update-ztj9z" podStartSLOduration=3.064665633 podStartE2EDuration="3.064665633s" podCreationTimestamp="2025-12-03 19:48:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:42.06191738 +0000 UTC m=+1138.024727646" watchObservedRunningTime="2025-12-03 19:48:42.064665633 +0000 UTC m=+1138.027475899" Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.068627 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-b86a-account-create-update-zjg4j" event={"ID":"321a3852-3277-44d8-a126-a8549e29d224","Type":"ContainerStarted","Data":"176fbe21f4e8f33f27179fe19cd6a1b4a4173d0d4f30f7180a97e7d2448063d2"} Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.138187 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-8wqnf" podStartSLOduration=2.138168782 podStartE2EDuration="2.138168782s" podCreationTimestamp="2025-12-03 19:48:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:42.127369784 +0000 UTC m=+1138.090180070" watchObservedRunningTime="2025-12-03 19:48:42.138168782 +0000 UTC m=+1138.100979048" Dec 03 19:48:42 crc kubenswrapper[4916]: 
I1203 19:48:42.153846 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-d242-account-create-update-nl9k8" podStartSLOduration=3.153808319 podStartE2EDuration="3.153808319s" podCreationTimestamp="2025-12-03 19:48:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:42.145379504 +0000 UTC m=+1138.108189760" watchObservedRunningTime="2025-12-03 19:48:42.153808319 +0000 UTC m=+1138.116618585" Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.167869 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-1496-account-create-update-zgm7f" podStartSLOduration=2.1678487029999998 podStartE2EDuration="2.167848703s" podCreationTimestamp="2025-12-03 19:48:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:42.167170745 +0000 UTC m=+1138.129981011" watchObservedRunningTime="2025-12-03 19:48:42.167848703 +0000 UTC m=+1138.130658969" Dec 03 19:48:42 crc kubenswrapper[4916]: I1203 19:48:42.194481 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-b86a-account-create-update-zjg4j" podStartSLOduration=3.194462713 podStartE2EDuration="3.194462713s" podCreationTimestamp="2025-12-03 19:48:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:42.187406885 +0000 UTC m=+1138.150217161" watchObservedRunningTime="2025-12-03 19:48:42.194462713 +0000 UTC m=+1138.157272979" Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.084172 4916 generic.go:334] "Generic (PLEG): container finished" podID="a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca" containerID="72c2b057142a7c630bdcfa408aaca20533cfe49d30e66cb39f40bde8787e777e" exitCode=0 Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.084241 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-d242-account-create-update-nl9k8" event={"ID":"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca","Type":"ContainerDied","Data":"72c2b057142a7c630bdcfa408aaca20533cfe49d30e66cb39f40bde8787e777e"} Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.086766 4916 generic.go:334] "Generic (PLEG): container finished" podID="8a211a30-fc89-4c07-afaf-e269d5ba2295" containerID="03dedab6881211d06323fe2a0f89529ee55f7ce2774f2e20f01d55e629d98895" exitCode=0 Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.086826 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8e13-account-create-update-ztj9z" event={"ID":"8a211a30-fc89-4c07-afaf-e269d5ba2295","Type":"ContainerDied","Data":"03dedab6881211d06323fe2a0f89529ee55f7ce2774f2e20f01d55e629d98895"} Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.088779 4916 generic.go:334] "Generic (PLEG): container finished" podID="321a3852-3277-44d8-a126-a8549e29d224" containerID="176fbe21f4e8f33f27179fe19cd6a1b4a4173d0d4f30f7180a97e7d2448063d2" exitCode=0 Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.089169 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-b86a-account-create-update-zjg4j" event={"ID":"321a3852-3277-44d8-a126-a8549e29d224","Type":"ContainerDied","Data":"176fbe21f4e8f33f27179fe19cd6a1b4a4173d0d4f30f7180a97e7d2448063d2"} Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.090351 4916 generic.go:334] "Generic (PLEG): 
container finished" podID="d9622e2e-7d39-4ec8-b6fc-580eee868216" containerID="1d55eef93911096e6d3efc48bf0bccf325e4086b99630d17caf739f48d45e7b3" exitCode=0 Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.090394 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-j5b5c" event={"ID":"d9622e2e-7d39-4ec8-b6fc-580eee868216","Type":"ContainerDied","Data":"1d55eef93911096e6d3efc48bf0bccf325e4086b99630d17caf739f48d45e7b3"} Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.093499 4916 generic.go:334] "Generic (PLEG): container finished" podID="a17100cf-50d9-4a21-8d10-b1e49808fe53" containerID="29fc3d06133cde71b2a9f2376c331c9a8355540d0837281afd8c8404a15199eb" exitCode=0 Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.093611 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8wqnf" event={"ID":"a17100cf-50d9-4a21-8d10-b1e49808fe53","Type":"ContainerDied","Data":"29fc3d06133cde71b2a9f2376c331c9a8355540d0837281afd8c8404a15199eb"} Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.097853 4916 generic.go:334] "Generic (PLEG): container finished" podID="22a8aa59-7a7f-44f7-b766-2f1648211423" containerID="845fa2c517ce949fb569fa0b08ea12fa5c8b19ea64af16121965f23ae2b1f734" exitCode=0 Dec 03 19:48:43 crc kubenswrapper[4916]: I1203 19:48:43.098158 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1496-account-create-update-zgm7f" event={"ID":"22a8aa59-7a7f-44f7-b766-2f1648211423","Type":"ContainerDied","Data":"845fa2c517ce949fb569fa0b08ea12fa5c8b19ea64af16121965f23ae2b1f734"} Dec 03 19:48:45 crc kubenswrapper[4916]: I1203 19:48:45.807931 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" Dec 03 19:48:45 crc kubenswrapper[4916]: I1203 19:48:45.905879 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-cx5z4"] Dec 03 19:48:45 crc kubenswrapper[4916]: I1203 19:48:45.906190 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-cx5z4" podUID="75d64faf-6ddb-49fc-8eda-03cc5c10233e" containerName="dnsmasq-dns" containerID="cri-o://1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a" gracePeriod=10 Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.746838 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1496-account-create-update-zgm7f" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.757666 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-j5b5c" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.770338 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-kf9l9" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.787092 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-d2ls9" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.794734 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8wqnf" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.812953 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-d242-account-create-update-nl9k8" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.864017 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8e13-account-create-update-ztj9z" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.867226 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-b86a-account-create-update-zjg4j" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915419 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9622e2e-7d39-4ec8-b6fc-580eee868216-operator-scripts\") pod \"d9622e2e-7d39-4ec8-b6fc-580eee868216\" (UID: \"d9622e2e-7d39-4ec8-b6fc-580eee868216\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915463 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-operator-scripts\") pod \"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca\" (UID: \"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915509 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8m8n\" (UniqueName: \"kubernetes.io/projected/d9622e2e-7d39-4ec8-b6fc-580eee868216-kube-api-access-p8m8n\") pod \"d9622e2e-7d39-4ec8-b6fc-580eee868216\" (UID: \"d9622e2e-7d39-4ec8-b6fc-580eee868216\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915553 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqkfg\" (UniqueName: \"kubernetes.io/projected/a17100cf-50d9-4a21-8d10-b1e49808fe53-kube-api-access-jqkfg\") pod \"a17100cf-50d9-4a21-8d10-b1e49808fe53\" (UID: \"a17100cf-50d9-4a21-8d10-b1e49808fe53\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915602 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97246a98-afa5-477d-9528-19c6fd55a094-operator-scripts\") pod \"97246a98-afa5-477d-9528-19c6fd55a094\" (UID: \"97246a98-afa5-477d-9528-19c6fd55a094\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915626 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-267xc\" (UniqueName: \"kubernetes.io/projected/97246a98-afa5-477d-9528-19c6fd55a094-kube-api-access-267xc\") pod \"97246a98-afa5-477d-9528-19c6fd55a094\" (UID: \"97246a98-afa5-477d-9528-19c6fd55a094\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915723 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvqsx\" (UniqueName: \"kubernetes.io/projected/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-kube-api-access-kvqsx\") pod \"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca\" (UID: \"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915771 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ef23599-1005-496a-a421-e3f4300e8b5a-operator-scripts\") pod \"2ef23599-1005-496a-a421-e3f4300e8b5a\" (UID: \"2ef23599-1005-496a-a421-e3f4300e8b5a\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915809 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-kdpbd\" (UniqueName: \"kubernetes.io/projected/22a8aa59-7a7f-44f7-b766-2f1648211423-kube-api-access-kdpbd\") pod \"22a8aa59-7a7f-44f7-b766-2f1648211423\" (UID: \"22a8aa59-7a7f-44f7-b766-2f1648211423\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915843 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22a8aa59-7a7f-44f7-b766-2f1648211423-operator-scripts\") pod \"22a8aa59-7a7f-44f7-b766-2f1648211423\" (UID: \"22a8aa59-7a7f-44f7-b766-2f1648211423\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915864 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a17100cf-50d9-4a21-8d10-b1e49808fe53-operator-scripts\") pod \"a17100cf-50d9-4a21-8d10-b1e49808fe53\" (UID: \"a17100cf-50d9-4a21-8d10-b1e49808fe53\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.915912 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dljv\" (UniqueName: \"kubernetes.io/projected/2ef23599-1005-496a-a421-e3f4300e8b5a-kube-api-access-5dljv\") pod \"2ef23599-1005-496a-a421-e3f4300e8b5a\" (UID: \"2ef23599-1005-496a-a421-e3f4300e8b5a\") " Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.916135 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca" (UID: "a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.916191 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9622e2e-7d39-4ec8-b6fc-580eee868216-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d9622e2e-7d39-4ec8-b6fc-580eee868216" (UID: "d9622e2e-7d39-4ec8-b6fc-580eee868216"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.916512 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9622e2e-7d39-4ec8-b6fc-580eee868216-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.916534 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.916670 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ef23599-1005-496a-a421-e3f4300e8b5a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2ef23599-1005-496a-a421-e3f4300e8b5a" (UID: "2ef23599-1005-496a-a421-e3f4300e8b5a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.917160 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97246a98-afa5-477d-9528-19c6fd55a094-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "97246a98-afa5-477d-9528-19c6fd55a094" (UID: "97246a98-afa5-477d-9528-19c6fd55a094"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.918159 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22a8aa59-7a7f-44f7-b766-2f1648211423-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "22a8aa59-7a7f-44f7-b766-2f1648211423" (UID: "22a8aa59-7a7f-44f7-b766-2f1648211423"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.918196 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a17100cf-50d9-4a21-8d10-b1e49808fe53-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a17100cf-50d9-4a21-8d10-b1e49808fe53" (UID: "a17100cf-50d9-4a21-8d10-b1e49808fe53"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.921403 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ef23599-1005-496a-a421-e3f4300e8b5a-kube-api-access-5dljv" (OuterVolumeSpecName: "kube-api-access-5dljv") pod "2ef23599-1005-496a-a421-e3f4300e8b5a" (UID: "2ef23599-1005-496a-a421-e3f4300e8b5a"). InnerVolumeSpecName "kube-api-access-5dljv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.921463 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9622e2e-7d39-4ec8-b6fc-580eee868216-kube-api-access-p8m8n" (OuterVolumeSpecName: "kube-api-access-p8m8n") pod "d9622e2e-7d39-4ec8-b6fc-580eee868216" (UID: "d9622e2e-7d39-4ec8-b6fc-580eee868216"). InnerVolumeSpecName "kube-api-access-p8m8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.922348 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97246a98-afa5-477d-9528-19c6fd55a094-kube-api-access-267xc" (OuterVolumeSpecName: "kube-api-access-267xc") pod "97246a98-afa5-477d-9528-19c6fd55a094" (UID: "97246a98-afa5-477d-9528-19c6fd55a094"). InnerVolumeSpecName "kube-api-access-267xc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.923274 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22a8aa59-7a7f-44f7-b766-2f1648211423-kube-api-access-kdpbd" (OuterVolumeSpecName: "kube-api-access-kdpbd") pod "22a8aa59-7a7f-44f7-b766-2f1648211423" (UID: "22a8aa59-7a7f-44f7-b766-2f1648211423"). InnerVolumeSpecName "kube-api-access-kdpbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.923427 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a17100cf-50d9-4a21-8d10-b1e49808fe53-kube-api-access-jqkfg" (OuterVolumeSpecName: "kube-api-access-jqkfg") pod "a17100cf-50d9-4a21-8d10-b1e49808fe53" (UID: "a17100cf-50d9-4a21-8d10-b1e49808fe53"). InnerVolumeSpecName "kube-api-access-jqkfg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.925703 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-kube-api-access-kvqsx" (OuterVolumeSpecName: "kube-api-access-kvqsx") pod "a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca" (UID: "a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca"). InnerVolumeSpecName "kube-api-access-kvqsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:48 crc kubenswrapper[4916]: I1203 19:48:48.969459 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018240 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfq9z\" (UniqueName: \"kubernetes.io/projected/8a211a30-fc89-4c07-afaf-e269d5ba2295-kube-api-access-pfq9z\") pod \"8a211a30-fc89-4c07-afaf-e269d5ba2295\" (UID: \"8a211a30-fc89-4c07-afaf-e269d5ba2295\") " Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018365 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5djr\" (UniqueName: \"kubernetes.io/projected/321a3852-3277-44d8-a126-a8549e29d224-kube-api-access-q5djr\") pod \"321a3852-3277-44d8-a126-a8549e29d224\" (UID: \"321a3852-3277-44d8-a126-a8549e29d224\") " Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018390 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/321a3852-3277-44d8-a126-a8549e29d224-operator-scripts\") pod \"321a3852-3277-44d8-a126-a8549e29d224\" (UID: \"321a3852-3277-44d8-a126-a8549e29d224\") " Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018420 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a211a30-fc89-4c07-afaf-e269d5ba2295-operator-scripts\") pod \"8a211a30-fc89-4c07-afaf-e269d5ba2295\" (UID: \"8a211a30-fc89-4c07-afaf-e269d5ba2295\") " Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018806 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvqsx\" (UniqueName: \"kubernetes.io/projected/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca-kube-api-access-kvqsx\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018828 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ef23599-1005-496a-a421-e3f4300e8b5a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018841 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdpbd\" (UniqueName: \"kubernetes.io/projected/22a8aa59-7a7f-44f7-b766-2f1648211423-kube-api-access-kdpbd\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018854 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22a8aa59-7a7f-44f7-b766-2f1648211423-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018865 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a17100cf-50d9-4a21-8d10-b1e49808fe53-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 
19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018878 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dljv\" (UniqueName: \"kubernetes.io/projected/2ef23599-1005-496a-a421-e3f4300e8b5a-kube-api-access-5dljv\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018891 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8m8n\" (UniqueName: \"kubernetes.io/projected/d9622e2e-7d39-4ec8-b6fc-580eee868216-kube-api-access-p8m8n\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018902 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqkfg\" (UniqueName: \"kubernetes.io/projected/a17100cf-50d9-4a21-8d10-b1e49808fe53-kube-api-access-jqkfg\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018914 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97246a98-afa5-477d-9528-19c6fd55a094-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.018926 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-267xc\" (UniqueName: \"kubernetes.io/projected/97246a98-afa5-477d-9528-19c6fd55a094-kube-api-access-267xc\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.019499 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a211a30-fc89-4c07-afaf-e269d5ba2295-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8a211a30-fc89-4c07-afaf-e269d5ba2295" (UID: "8a211a30-fc89-4c07-afaf-e269d5ba2295"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.020513 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/321a3852-3277-44d8-a126-a8549e29d224-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "321a3852-3277-44d8-a126-a8549e29d224" (UID: "321a3852-3277-44d8-a126-a8549e29d224"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.024186 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/321a3852-3277-44d8-a126-a8549e29d224-kube-api-access-q5djr" (OuterVolumeSpecName: "kube-api-access-q5djr") pod "321a3852-3277-44d8-a126-a8549e29d224" (UID: "321a3852-3277-44d8-a126-a8549e29d224"). InnerVolumeSpecName "kube-api-access-q5djr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.025262 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a211a30-fc89-4c07-afaf-e269d5ba2295-kube-api-access-pfq9z" (OuterVolumeSpecName: "kube-api-access-pfq9z") pod "8a211a30-fc89-4c07-afaf-e269d5ba2295" (UID: "8a211a30-fc89-4c07-afaf-e269d5ba2295"). InnerVolumeSpecName "kube-api-access-pfq9z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.119800 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-dns-svc\") pod \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.119875 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-sb\") pod \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.120130 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-nb\") pod \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.120236 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-config\") pod \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.120400 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtl6h\" (UniqueName: \"kubernetes.io/projected/75d64faf-6ddb-49fc-8eda-03cc5c10233e-kube-api-access-wtl6h\") pod \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\" (UID: \"75d64faf-6ddb-49fc-8eda-03cc5c10233e\") " Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.120965 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5djr\" (UniqueName: \"kubernetes.io/projected/321a3852-3277-44d8-a126-a8549e29d224-kube-api-access-q5djr\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.120995 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/321a3852-3277-44d8-a126-a8549e29d224-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.121013 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8a211a30-fc89-4c07-afaf-e269d5ba2295-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.121031 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfq9z\" (UniqueName: \"kubernetes.io/projected/8a211a30-fc89-4c07-afaf-e269d5ba2295-kube-api-access-pfq9z\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.125555 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75d64faf-6ddb-49fc-8eda-03cc5c10233e-kube-api-access-wtl6h" (OuterVolumeSpecName: "kube-api-access-wtl6h") pod "75d64faf-6ddb-49fc-8eda-03cc5c10233e" (UID: "75d64faf-6ddb-49fc-8eda-03cc5c10233e"). InnerVolumeSpecName "kube-api-access-wtl6h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.165992 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vzcmc" event={"ID":"94bf33bd-9ef5-41b9-820b-63fd78b3a384","Type":"ContainerStarted","Data":"41b13d31b1ca8af0cbcc8ee6cce215cd6a431c652bf14b78cf3e2d872fe7e0fb"} Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.170698 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-b86a-account-create-update-zjg4j" event={"ID":"321a3852-3277-44d8-a126-a8549e29d224","Type":"ContainerDied","Data":"ef152848f56bf90a16191e044aeb50b166b37181428fd6649b752e6eb7f7c48b"} Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.170741 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef152848f56bf90a16191e044aeb50b166b37181428fd6649b752e6eb7f7c48b" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.170802 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-b86a-account-create-update-zjg4j" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.177089 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-kf9l9" event={"ID":"2ef23599-1005-496a-a421-e3f4300e8b5a","Type":"ContainerDied","Data":"77c19e92ad7b158fd3f25637af577899ab2cd036b27a8e9a9dde7cb0273e64a6"} Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.177134 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77c19e92ad7b158fd3f25637af577899ab2cd036b27a8e9a9dde7cb0273e64a6" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.177236 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-kf9l9" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.183470 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1496-account-create-update-zgm7f" event={"ID":"22a8aa59-7a7f-44f7-b766-2f1648211423","Type":"ContainerDied","Data":"a84545fabba5cbfbdb934dfd792764ae29b60e31d2219651c4753fa3ad5e0fa4"} Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.183518 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a84545fabba5cbfbdb934dfd792764ae29b60e31d2219651c4753fa3ad5e0fa4" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.183647 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1496-account-create-update-zgm7f" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.183782 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-config" (OuterVolumeSpecName: "config") pod "75d64faf-6ddb-49fc-8eda-03cc5c10233e" (UID: "75d64faf-6ddb-49fc-8eda-03cc5c10233e"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.190800 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-vzcmc" podStartSLOduration=1.865189676 podStartE2EDuration="9.190774499s" podCreationTimestamp="2025-12-03 19:48:40 +0000 UTC" firstStartedPulling="2025-12-03 19:48:41.260043507 +0000 UTC m=+1137.222853773" lastFinishedPulling="2025-12-03 19:48:48.58562832 +0000 UTC m=+1144.548438596" observedRunningTime="2025-12-03 19:48:49.1874205 +0000 UTC m=+1145.150230776" watchObservedRunningTime="2025-12-03 19:48:49.190774499 +0000 UTC m=+1145.153584805" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.193797 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-8wqnf" event={"ID":"a17100cf-50d9-4a21-8d10-b1e49808fe53","Type":"ContainerDied","Data":"0931baa8913fd37ccff7740039243143e118cc45d2fd5433827f9b896836ae53"} Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.193845 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0931baa8913fd37ccff7740039243143e118cc45d2fd5433827f9b896836ae53" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.193926 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-8wqnf" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.193957 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "75d64faf-6ddb-49fc-8eda-03cc5c10233e" (UID: "75d64faf-6ddb-49fc-8eda-03cc5c10233e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.196537 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-d2ls9" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.196538 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-d2ls9" event={"ID":"97246a98-afa5-477d-9528-19c6fd55a094","Type":"ContainerDied","Data":"b29b46194808426604a04fdf5e271bc2acf6588b5fbc40f74d216489a0b9c3a8"} Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.196717 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b29b46194808426604a04fdf5e271bc2acf6588b5fbc40f74d216489a0b9c3a8" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.200040 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-d242-account-create-update-nl9k8" event={"ID":"a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca","Type":"ContainerDied","Data":"74606ce33185b932df87d5dc1f69bf54b37d6c1f3aa78e6f8d65c1436ae954ad"} Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.200073 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74606ce33185b932df87d5dc1f69bf54b37d6c1f3aa78e6f8d65c1436ae954ad" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.200118 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-d242-account-create-update-nl9k8" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.204556 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8e13-account-create-update-ztj9z" event={"ID":"8a211a30-fc89-4c07-afaf-e269d5ba2295","Type":"ContainerDied","Data":"e93cab5e40bca777e877e98d7679c5c1f24803a08af33d7b50faf4ab3a8ec766"} Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.204613 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8e13-account-create-update-ztj9z" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.204622 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e93cab5e40bca777e877e98d7679c5c1f24803a08af33d7b50faf4ab3a8ec766" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.207140 4916 generic.go:334] "Generic (PLEG): container finished" podID="75d64faf-6ddb-49fc-8eda-03cc5c10233e" containerID="1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a" exitCode=0 Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.207209 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-cx5z4" event={"ID":"75d64faf-6ddb-49fc-8eda-03cc5c10233e","Type":"ContainerDied","Data":"1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a"} Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.207238 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-cx5z4" event={"ID":"75d64faf-6ddb-49fc-8eda-03cc5c10233e","Type":"ContainerDied","Data":"b40f84560cd10635e85f2c66411e958b8548a0498e28164248acd3f06a909682"} Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.207259 4916 scope.go:117] "RemoveContainer" containerID="1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.207388 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-cx5z4" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.208072 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "75d64faf-6ddb-49fc-8eda-03cc5c10233e" (UID: "75d64faf-6ddb-49fc-8eda-03cc5c10233e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.209090 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-j5b5c" event={"ID":"d9622e2e-7d39-4ec8-b6fc-580eee868216","Type":"ContainerDied","Data":"520ce2cdab25f9123bc0e32ffe8f0ba0c4aade7e12fcac69d334c5637cde7ac9"} Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.209141 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="520ce2cdab25f9123bc0e32ffe8f0ba0c4aade7e12fcac69d334c5637cde7ac9" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.209275 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-j5b5c" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.214915 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "75d64faf-6ddb-49fc-8eda-03cc5c10233e" (UID: "75d64faf-6ddb-49fc-8eda-03cc5c10233e"). 
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.223099 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.223125 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.223135 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.223174 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75d64faf-6ddb-49fc-8eda-03cc5c10233e-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.223184 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtl6h\" (UniqueName: \"kubernetes.io/projected/75d64faf-6ddb-49fc-8eda-03cc5c10233e-kube-api-access-wtl6h\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.237765 4916 scope.go:117] "RemoveContainer" containerID="14f535fa5e6b674e4a9df31087f5b7fffa4c1f73814d7dff047acbf7fbd1f78a" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.282434 4916 scope.go:117] "RemoveContainer" containerID="1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a" Dec 03 19:48:49 crc kubenswrapper[4916]: E1203 19:48:49.283053 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a\": container with ID starting with 1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a not found: ID does not exist" containerID="1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.283115 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a"} err="failed to get container status \"1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a\": rpc error: code = NotFound desc = could not find container \"1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a\": container with ID starting with 1e4dea119aa212b72caade4bd5bb978d3e864da168f42b5be259fb576348f59a not found: ID does not exist" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.283159 4916 scope.go:117] "RemoveContainer" containerID="14f535fa5e6b674e4a9df31087f5b7fffa4c1f73814d7dff047acbf7fbd1f78a" Dec 03 19:48:49 crc kubenswrapper[4916]: E1203 19:48:49.283978 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14f535fa5e6b674e4a9df31087f5b7fffa4c1f73814d7dff047acbf7fbd1f78a\": container with ID starting with 14f535fa5e6b674e4a9df31087f5b7fffa4c1f73814d7dff047acbf7fbd1f78a not found: ID does not exist" containerID="14f535fa5e6b674e4a9df31087f5b7fffa4c1f73814d7dff047acbf7fbd1f78a" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.284023 4916 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14f535fa5e6b674e4a9df31087f5b7fffa4c1f73814d7dff047acbf7fbd1f78a"} err="failed to get container status \"14f535fa5e6b674e4a9df31087f5b7fffa4c1f73814d7dff047acbf7fbd1f78a\": rpc error: code = NotFound desc = could not find container \"14f535fa5e6b674e4a9df31087f5b7fffa4c1f73814d7dff047acbf7fbd1f78a\": container with ID starting with 14f535fa5e6b674e4a9df31087f5b7fffa4c1f73814d7dff047acbf7fbd1f78a not found: ID does not exist" Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.581198 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-cx5z4"] Dec 03 19:48:49 crc kubenswrapper[4916]: I1203 19:48:49.590607 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-cx5z4"] Dec 03 19:48:50 crc kubenswrapper[4916]: I1203 19:48:50.490260 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75d64faf-6ddb-49fc-8eda-03cc5c10233e" path="/var/lib/kubelet/pods/75d64faf-6ddb-49fc-8eda-03cc5c10233e/volumes" Dec 03 19:48:52 crc kubenswrapper[4916]: I1203 19:48:52.248683 4916 generic.go:334] "Generic (PLEG): container finished" podID="94bf33bd-9ef5-41b9-820b-63fd78b3a384" containerID="41b13d31b1ca8af0cbcc8ee6cce215cd6a431c652bf14b78cf3e2d872fe7e0fb" exitCode=0 Dec 03 19:48:52 crc kubenswrapper[4916]: I1203 19:48:52.248767 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vzcmc" event={"ID":"94bf33bd-9ef5-41b9-820b-63fd78b3a384","Type":"ContainerDied","Data":"41b13d31b1ca8af0cbcc8ee6cce215cd6a431c652bf14b78cf3e2d872fe7e0fb"} Dec 03 19:48:53 crc kubenswrapper[4916]: I1203 19:48:53.629498 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:53 crc kubenswrapper[4916]: I1203 19:48:53.799740 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xxgb\" (UniqueName: \"kubernetes.io/projected/94bf33bd-9ef5-41b9-820b-63fd78b3a384-kube-api-access-7xxgb\") pod \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " Dec 03 19:48:53 crc kubenswrapper[4916]: I1203 19:48:53.799993 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-combined-ca-bundle\") pod \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " Dec 03 19:48:53 crc kubenswrapper[4916]: I1203 19:48:53.800045 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-config-data\") pod \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\" (UID: \"94bf33bd-9ef5-41b9-820b-63fd78b3a384\") " Dec 03 19:48:53 crc kubenswrapper[4916]: I1203 19:48:53.812919 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94bf33bd-9ef5-41b9-820b-63fd78b3a384-kube-api-access-7xxgb" (OuterVolumeSpecName: "kube-api-access-7xxgb") pod "94bf33bd-9ef5-41b9-820b-63fd78b3a384" (UID: "94bf33bd-9ef5-41b9-820b-63fd78b3a384"). InnerVolumeSpecName "kube-api-access-7xxgb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:53 crc kubenswrapper[4916]: I1203 19:48:53.847390 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94bf33bd-9ef5-41b9-820b-63fd78b3a384" (UID: "94bf33bd-9ef5-41b9-820b-63fd78b3a384"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:48:53 crc kubenswrapper[4916]: I1203 19:48:53.879479 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-config-data" (OuterVolumeSpecName: "config-data") pod "94bf33bd-9ef5-41b9-820b-63fd78b3a384" (UID: "94bf33bd-9ef5-41b9-820b-63fd78b3a384"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:48:53 crc kubenswrapper[4916]: I1203 19:48:53.902183 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xxgb\" (UniqueName: \"kubernetes.io/projected/94bf33bd-9ef5-41b9-820b-63fd78b3a384-kube-api-access-7xxgb\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:53 crc kubenswrapper[4916]: I1203 19:48:53.902241 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:53 crc kubenswrapper[4916]: I1203 19:48:53.902260 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94bf33bd-9ef5-41b9-820b-63fd78b3a384-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.268842 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-vzcmc" event={"ID":"94bf33bd-9ef5-41b9-820b-63fd78b3a384","Type":"ContainerDied","Data":"1c00d5546b0f7ad2fcd43fe243f65498a97022e76d11f36ac02817d9a0821d15"} Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.269187 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c00d5546b0f7ad2fcd43fe243f65498a97022e76d11f36ac02817d9a0821d15" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.268905 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-vzcmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.545486 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-8dn5k"] Dec 03 19:48:54 crc kubenswrapper[4916]: E1203 19:48:54.549747 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97246a98-afa5-477d-9528-19c6fd55a094" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.549776 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="97246a98-afa5-477d-9528-19c6fd55a094" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: E1203 19:48:54.549819 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef23599-1005-496a-a421-e3f4300e8b5a" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.549826 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef23599-1005-496a-a421-e3f4300e8b5a" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: E1203 19:48:54.549834 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75d64faf-6ddb-49fc-8eda-03cc5c10233e" containerName="dnsmasq-dns" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.549839 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="75d64faf-6ddb-49fc-8eda-03cc5c10233e" containerName="dnsmasq-dns" Dec 03 19:48:54 crc kubenswrapper[4916]: E1203 19:48:54.549849 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="321a3852-3277-44d8-a126-a8549e29d224" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.549854 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="321a3852-3277-44d8-a126-a8549e29d224" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc kubenswrapper[4916]: E1203 19:48:54.549864 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a17100cf-50d9-4a21-8d10-b1e49808fe53" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.549870 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a17100cf-50d9-4a21-8d10-b1e49808fe53" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: E1203 19:48:54.549885 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94bf33bd-9ef5-41b9-820b-63fd78b3a384" containerName="keystone-db-sync" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.549890 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="94bf33bd-9ef5-41b9-820b-63fd78b3a384" containerName="keystone-db-sync" Dec 03 19:48:54 crc kubenswrapper[4916]: E1203 19:48:54.549902 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22a8aa59-7a7f-44f7-b766-2f1648211423" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.549908 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="22a8aa59-7a7f-44f7-b766-2f1648211423" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc kubenswrapper[4916]: E1203 19:48:54.549917 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.549924 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc 
kubenswrapper[4916]: E1203 19:48:54.549934 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75d64faf-6ddb-49fc-8eda-03cc5c10233e" containerName="init" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.549940 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="75d64faf-6ddb-49fc-8eda-03cc5c10233e" containerName="init" Dec 03 19:48:54 crc kubenswrapper[4916]: E1203 19:48:54.549952 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a211a30-fc89-4c07-afaf-e269d5ba2295" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.549957 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a211a30-fc89-4c07-afaf-e269d5ba2295" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc kubenswrapper[4916]: E1203 19:48:54.549973 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9622e2e-7d39-4ec8-b6fc-580eee868216" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.549978 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9622e2e-7d39-4ec8-b6fc-580eee868216" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.550122 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="a17100cf-50d9-4a21-8d10-b1e49808fe53" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.550135 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ef23599-1005-496a-a421-e3f4300e8b5a" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.550149 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.550156 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="94bf33bd-9ef5-41b9-820b-63fd78b3a384" containerName="keystone-db-sync" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.550166 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9622e2e-7d39-4ec8-b6fc-580eee868216" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.550177 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a211a30-fc89-4c07-afaf-e269d5ba2295" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.550187 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="321a3852-3277-44d8-a126-a8549e29d224" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.550196 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="97246a98-afa5-477d-9528-19c6fd55a094" containerName="mariadb-database-create" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.550204 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="75d64faf-6ddb-49fc-8eda-03cc5c10233e" containerName="dnsmasq-dns" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.550215 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="22a8aa59-7a7f-44f7-b766-2f1648211423" containerName="mariadb-account-create-update" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.551009 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.553628 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-8dn5k"] Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.601109 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-nqfmc"] Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.602460 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.607022 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hvlw8" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.608362 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.608520 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.611192 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.611692 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-nqfmc"] Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.612651 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.722867 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-credential-keys\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.722927 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-scripts\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.722958 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-svc\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.722988 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p9rg\" (UniqueName: \"kubernetes.io/projected/56003ff0-c41d-4532-9091-744aebf1d53a-kube-api-access-5p9rg\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.723039 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-764p9\" (UniqueName: \"kubernetes.io/projected/2176d2ca-1321-4345-a377-c4dc42dcc1a1-kube-api-access-764p9\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" 
Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.723067 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-combined-ca-bundle\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.723208 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-config\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.723299 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.723345 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-fernet-keys\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.723383 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.723416 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.723443 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-config-data\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.737416 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-lhffz"] Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.738389 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-lhffz" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.740912 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.746713 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.750053 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-h4fp6" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.773952 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-lhffz"] Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825398 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-764p9\" (UniqueName: \"kubernetes.io/projected/2176d2ca-1321-4345-a377-c4dc42dcc1a1-kube-api-access-764p9\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825483 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-combined-ca-bundle\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825552 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-config\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825598 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825622 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-fernet-keys\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825645 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825665 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825685 4916 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-config-data\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825703 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-credential-keys\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825720 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-scripts\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825750 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-svc\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.825777 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p9rg\" (UniqueName: \"kubernetes.io/projected/56003ff0-c41d-4532-9091-744aebf1d53a-kube-api-access-5p9rg\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.826636 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.826733 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.827812 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-svc\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.828131 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.828382 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-config\") pod 
\"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.836556 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-combined-ca-bundle\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.837047 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-l4sh2"] Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.838058 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.842710 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-config-data\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.844084 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-xrjfc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.846145 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-fernet-keys\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.846409 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.850615 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.859696 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-l4sh2"] Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.861040 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-credential-keys\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.861342 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-scripts\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.864171 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p9rg\" (UniqueName: \"kubernetes.io/projected/56003ff0-c41d-4532-9091-744aebf1d53a-kube-api-access-5p9rg\") pod \"dnsmasq-dns-847c4cc679-8dn5k\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.869242 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-764p9\" (UniqueName: 
\"kubernetes.io/projected/2176d2ca-1321-4345-a377-c4dc42dcc1a1-kube-api-access-764p9\") pod \"keystone-bootstrap-nqfmc\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") " pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.881453 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.883473 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.884984 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.887582 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.890159 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.899362 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-8dn5k"] Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.928870 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47bpf\" (UniqueName: \"kubernetes.io/projected/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-kube-api-access-47bpf\") pod \"neutron-db-sync-lhffz\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") " pod="openstack/neutron-db-sync-lhffz" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.928998 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-config\") pod \"neutron-db-sync-lhffz\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") " pod="openstack/neutron-db-sync-lhffz" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.929020 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-combined-ca-bundle\") pod \"neutron-db-sync-lhffz\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") " pod="openstack/neutron-db-sync-lhffz" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.934201 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nqfmc" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.947926 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-xct2g"] Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.948954 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.952993 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.953944 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-2dvnk" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.954085 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.979962 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:48:54 crc kubenswrapper[4916]: I1203 19:48:54.988065 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-xct2g"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.015834 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-gd75g"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.017192 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.033686 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-gd75g"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.035157 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.035195 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-combined-ca-bundle\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.035219 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-run-httpd\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.035289 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-config-data\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.035311 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-config\") pod \"neutron-db-sync-lhffz\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") " pod="openstack/neutron-db-sync-lhffz" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.035329 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-config-data\") pod \"cinder-db-sync-l4sh2\" 
(UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.035356 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-combined-ca-bundle\") pod \"neutron-db-sync-lhffz\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") " pod="openstack/neutron-db-sync-lhffz" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.035412 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47bpf\" (UniqueName: \"kubernetes.io/projected/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-kube-api-access-47bpf\") pod \"neutron-db-sync-lhffz\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") " pod="openstack/neutron-db-sync-lhffz" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.035441 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.036719 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-scripts\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.036777 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vlpl\" (UniqueName: \"kubernetes.io/projected/7eac938e-d147-4214-a0b1-4a17ac69b649-kube-api-access-4vlpl\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.036799 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-db-sync-config-data\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.036851 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eac938e-d147-4214-a0b1-4a17ac69b649-etc-machine-id\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.036905 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9nmj\" (UniqueName: \"kubernetes.io/projected/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-kube-api-access-v9nmj\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.036948 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-log-httpd\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 
03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.036967 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-scripts\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.048079 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-config\") pod \"neutron-db-sync-lhffz\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") " pod="openstack/neutron-db-sync-lhffz" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.050753 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-combined-ca-bundle\") pod \"neutron-db-sync-lhffz\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") " pod="openstack/neutron-db-sync-lhffz" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.051850 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-mvxxd"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.053156 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-mvxxd" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.058730 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47bpf\" (UniqueName: \"kubernetes.io/projected/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-kube-api-access-47bpf\") pod \"neutron-db-sync-lhffz\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") " pod="openstack/neutron-db-sync-lhffz" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.058807 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.058997 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-29w6f" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.066127 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-mvxxd"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.138900 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-scripts\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.138960 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vlpl\" (UniqueName: \"kubernetes.io/projected/7eac938e-d147-4214-a0b1-4a17ac69b649-kube-api-access-4vlpl\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.138984 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-db-sync-config-data\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139007 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-scripts\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139032 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eac938e-d147-4214-a0b1-4a17ac69b649-etc-machine-id\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139056 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2377ccfa-eef8-4809-993d-28cf0320206a-logs\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139079 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9nmj\" (UniqueName: \"kubernetes.io/projected/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-kube-api-access-v9nmj\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139097 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9phn\" (UniqueName: \"kubernetes.io/projected/ecf17f0e-cc04-4896-b816-a045e729e980-kube-api-access-z9phn\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139127 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-log-httpd\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139149 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139172 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-scripts\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139194 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139216 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-svc\") 
pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139240 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-combined-ca-bundle\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139255 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139274 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139292 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-run-httpd\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139308 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjgg8\" (UniqueName: \"kubernetes.io/projected/2377ccfa-eef8-4809-993d-28cf0320206a-kube-api-access-fjgg8\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139335 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-config\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139364 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-config-data\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139380 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-config-data\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139421 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-config-data\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc 
kubenswrapper[4916]: I1203 19:48:55.139436 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-combined-ca-bundle\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139459 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.139692 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eac938e-d147-4214-a0b1-4a17ac69b649-etc-machine-id\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.140156 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-run-httpd\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.143366 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-scripts\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.143390 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-log-httpd\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.145861 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-db-sync-config-data\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.145891 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-scripts\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.146348 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.150972 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-config-data\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc 
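
Because every successful mount is bracketed by a "MountVolume started" and a "MountVolume.SetUp succeeded" entry carrying the same UniqueName, per-volume mount latency can be recovered from the timestamps in the I-prefixed klog header. A sketch under two assumptions: the input holds one journal entry per line (as in the raw log; the entries are shown wrapped here), and all entries fall on the same day, so time-of-day subtraction is safe. mount_latencies is a hypothetical helper name. On the entries above it yields roughly 18 ms for cinder's kube-api-access-4vlpl token volume (started 19:48:55.138960, succeeded 19:48:55.157162).

```python
import re
from datetime import datetime

TS = re.compile(r'I\d{4} (\d{2}:\d{2}:\d{2}\.\d{6})')
STARTED = re.compile(
    r'operationExecutor\.MountVolume started for volume '
    r'.*?\(UniqueName: \\"([^"\\]+)\\"\)')
DONE = re.compile(
    r'MountVolume\.SetUp succeeded for volume '
    r'.*?\(UniqueName: \\"([^"\\]+)\\"\)')

def mount_latencies(lines):
    """Seconds from 'MountVolume started' to 'SetUp succeeded', keyed by
    the volume's UniqueName (bare volume names repeat across pods)."""
    started, latency = {}, {}
    for ln in lines:
        ts = TS.search(ln)
        if not ts:
            continue
        t = datetime.strptime(ts.group(1), '%H:%M:%S.%f')
        if (m := STARTED.search(ln)):
            started.setdefault(m.group(1), t)
        elif (m := DONE.search(ln)) and m.group(1) in started:
            latency[m.group(1)] = (t - started[m.group(1)]).total_seconds()
    return latency
```
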
kubenswrapper[4916]: I1203 19:48:55.153302 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.156596 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-combined-ca-bundle\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.157162 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vlpl\" (UniqueName: \"kubernetes.io/projected/7eac938e-d147-4214-a0b1-4a17ac69b649-kube-api-access-4vlpl\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.176437 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-config-data\") pod \"cinder-db-sync-l4sh2\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.182407 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9nmj\" (UniqueName: \"kubernetes.io/projected/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-kube-api-access-v9nmj\") pod \"ceilometer-0\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") " pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.236833 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-q6cjh"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.238013 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.240814 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-gn5p6" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.240906 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.243197 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-q6cjh"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.245709 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.245748 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjgg8\" (UniqueName: \"kubernetes.io/projected/2377ccfa-eef8-4809-993d-28cf0320206a-kube-api-access-fjgg8\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.245777 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-config\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.245833 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-combined-ca-bundle\") pod \"heat-db-sync-mvxxd\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " pod="openstack/heat-db-sync-mvxxd" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.245881 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-config-data\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.245899 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-combined-ca-bundle\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.245969 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-config-data\") pod \"heat-db-sync-mvxxd\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " pod="openstack/heat-db-sync-mvxxd" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.246003 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-scripts\") pod \"placement-db-sync-xct2g\" (UID: 
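
Every UniqueName embeds the volume plugin that served it (kubernetes.io/secret, projected, configmap, empty-dir, host-path, and local-volume all appear above), so a one-pass histogram shows which plugin types dominate a mount burst like this one. A small sketch; plugin_histogram is an invented name and the regex again assumes the \"-escaped form shown here.

```python
import re
from collections import Counter

# UniqueName values look like kubernetes.io/<plugin>/<uid>-<name>.
UNIQUE = re.compile(r'\(UniqueName: \\"kubernetes\.io/([a-z-]+)/')

def plugin_histogram(lines):
    """Count volume operations per kubelet volume plugin type."""
    c = Counter()
    for ln in lines:
        for plugin in UNIQUE.findall(ln):
            c[plugin] += 1
    return c
```
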
\"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.246034 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2377ccfa-eef8-4809-993d-28cf0320206a-logs\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.246061 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9phn\" (UniqueName: \"kubernetes.io/projected/ecf17f0e-cc04-4896-b816-a045e729e980-kube-api-access-z9phn\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.246081 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.246109 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9ktm\" (UniqueName: \"kubernetes.io/projected/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-kube-api-access-p9ktm\") pod \"heat-db-sync-mvxxd\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " pod="openstack/heat-db-sync-mvxxd" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.246140 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.246162 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.249818 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-config-data\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.252022 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2377ccfa-eef8-4809-993d-28cf0320206a-logs\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.252634 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-combined-ca-bundle\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 
19:48:55.256069 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-scripts\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.258886 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.259250 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.259340 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.265685 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-config\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.265901 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.268489 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjgg8\" (UniqueName: \"kubernetes.io/projected/2377ccfa-eef8-4809-993d-28cf0320206a-kube-api-access-fjgg8\") pod \"placement-db-sync-xct2g\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.277279 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9phn\" (UniqueName: \"kubernetes.io/projected/ecf17f0e-cc04-4896-b816-a045e729e980-kube-api-access-z9phn\") pod \"dnsmasq-dns-785d8bcb8c-gd75g\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.360333 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-config-data\") pod \"heat-db-sync-mvxxd\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " pod="openstack/heat-db-sync-mvxxd" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.360507 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9ktm\" (UniqueName: 
\"kubernetes.io/projected/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-kube-api-access-p9ktm\") pod \"heat-db-sync-mvxxd\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " pod="openstack/heat-db-sync-mvxxd" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.360557 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-db-sync-config-data\") pod \"barbican-db-sync-q6cjh\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") " pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.360995 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skwrb\" (UniqueName: \"kubernetes.io/projected/0d5347c1-1439-4284-977d-390912ffe9a5-kube-api-access-skwrb\") pod \"barbican-db-sync-q6cjh\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") " pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.361103 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-combined-ca-bundle\") pod \"barbican-db-sync-q6cjh\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") " pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.361156 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-combined-ca-bundle\") pod \"heat-db-sync-mvxxd\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " pod="openstack/heat-db-sync-mvxxd" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.362181 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-lhffz" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.370335 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-config-data\") pod \"heat-db-sync-mvxxd\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " pod="openstack/heat-db-sync-mvxxd" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.370669 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.384176 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-combined-ca-bundle\") pod \"heat-db-sync-mvxxd\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " pod="openstack/heat-db-sync-mvxxd" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.386751 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9ktm\" (UniqueName: \"kubernetes.io/projected/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-kube-api-access-p9ktm\") pod \"heat-db-sync-mvxxd\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " pod="openstack/heat-db-sync-mvxxd" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.409108 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.419556 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-xct2g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.443148 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.446812 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-mvxxd" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.465403 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-db-sync-config-data\") pod \"barbican-db-sync-q6cjh\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") " pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.465450 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skwrb\" (UniqueName: \"kubernetes.io/projected/0d5347c1-1439-4284-977d-390912ffe9a5-kube-api-access-skwrb\") pod \"barbican-db-sync-q6cjh\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") " pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.465484 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-combined-ca-bundle\") pod \"barbican-db-sync-q6cjh\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") " pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.475662 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-db-sync-config-data\") pod \"barbican-db-sync-q6cjh\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") " pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.476387 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-combined-ca-bundle\") pod \"barbican-db-sync-q6cjh\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") " pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.498401 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skwrb\" (UniqueName: \"kubernetes.io/projected/0d5347c1-1439-4284-977d-390912ffe9a5-kube-api-access-skwrb\") pod \"barbican-db-sync-q6cjh\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") " pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.533497 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-8dn5k"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.570373 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.616463 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-nqfmc"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.661412 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.663842 4916 util.go:30] "No sandbox for pod can be found. 
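
Interleaved with the mounts, kubelet.go logs the pod sync loop itself: "SyncLoop ADD" when a pod first arrives from the API server, "SyncLoop UPDATE" on spec or status changes, and "SyncLoop DELETE" on removal (the dnsmasq-dns-847c4cc679-8dn5k deletion above). Counting these per pod gives a quick churn summary; a sketch, assuming the quoted source="api" single-pod form visible in these entries.

```python
import re
from collections import Counter

SYNC = re.compile(r'"SyncLoop (ADD|UPDATE|DELETE)" source="api" pods=\["([^"]+)"\]')

def syncloop_counts(lines):
    """Counter keyed by (pod, op) over the SyncLoop entries."""
    ops = Counter()
    for ln in lines:
        for op, pod in SYNC.findall(ln):
            ops[(pod, op)] += 1
    return ops
```
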
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.672017 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.672081 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.672028 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.672358 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-5xd5f" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.675378 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.721312 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.724607 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.727090 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.727295 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.752247 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.770847 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-config-data\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.770933 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.770961 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.770986 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-scripts\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.771024 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-6wclk\" (UniqueName: \"kubernetes.io/projected/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-kube-api-access-6wclk\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.771075 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.771104 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-logs\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.771131 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.873482 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.873938 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.873966 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-scripts\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874003 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-config-data\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874037 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-config-data\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874053 4916 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874076 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874096 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874116 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-scripts\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874145 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wclk\" (UniqueName: \"kubernetes.io/projected/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-kube-api-access-6wclk\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874173 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-logs\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874197 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874217 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-logs\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874235 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874257 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.874288 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxczl\" (UniqueName: \"kubernetes.io/projected/867dcce9-cc32-4525-a877-53e6da57a995-kube-api-access-dxczl\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.875303 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.875491 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.875766 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-logs\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.889808 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.890280 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.891946 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-config-data\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.898178 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-scripts\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.899783 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wclk\" (UniqueName: 
\"kubernetes.io/projected/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-kube-api-access-6wclk\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.944313 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") " pod="openstack/glance-default-external-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.947212 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-lhffz"] Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.977030 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.977099 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-scripts\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.977164 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-config-data\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.977747 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.977937 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-logs\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.978103 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.978271 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxczl\" (UniqueName: \"kubernetes.io/projected/867dcce9-cc32-4525-a877-53e6da57a995-kube-api-access-dxczl\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.978341 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.981037 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.981381 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.981701 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-logs\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.982359 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.983201 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-config-data\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.984869 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:55 crc kubenswrapper[4916]: I1203 19:48:55.987377 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-scripts\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.002669 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.016614 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxczl\" (UniqueName: \"kubernetes.io/projected/867dcce9-cc32-4525-a877-53e6da57a995-kube-api-access-dxczl\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.061295 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.098962 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-l4sh2"] Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.113843 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-xct2g"] Dec 03 19:48:56 crc kubenswrapper[4916]: W1203 19:48:56.145759 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2377ccfa_eef8_4809_993d_28cf0320206a.slice/crio-caa4f7afc503c792c695d3cdc78be6131aefd4e7d22c0b504129514b9045b36e WatchSource:0}: Error finding container caa4f7afc503c792c695d3cdc78be6131aefd4e7d22c0b504129514b9045b36e: Status 404 returned error can't find the container with id caa4f7afc503c792c695d3cdc78be6131aefd4e7d22c0b504129514b9045b36e Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.219496 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-mvxxd"] Dec 03 19:48:56 crc kubenswrapper[4916]: W1203 19:48:56.221741 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64ae3277_4d93_4a36_ba5a_9913bb3e58d7.slice/crio-18c8327bb634ff7f7b2f1abebbfdefabc4de1a17ce8641d745ec2c2c73a01957 WatchSource:0}: Error finding container 18c8327bb634ff7f7b2f1abebbfdefabc4de1a17ce8641d745ec2c2c73a01957: Status 404 returned error can't find the container with id 18c8327bb634ff7f7b2f1abebbfdefabc4de1a17ce8641d745ec2c2c73a01957 Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.228279 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.292996 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-l4sh2" event={"ID":"7eac938e-d147-4214-a0b1-4a17ac69b649","Type":"ContainerStarted","Data":"c9d148e8225f6114f6ca571f07be2155b40da1ed393c3427ad5744ac0a116181"} Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.294590 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fcf233ee-1271-4a7a-9fcb-5a7725746aa2","Type":"ContainerStarted","Data":"6b19e06dd45a6f0570d7965b5102b4f8cd8b3ef8c2fe6db9d2e8aafe669562f0"} Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.295767 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nqfmc" event={"ID":"2176d2ca-1321-4345-a377-c4dc42dcc1a1","Type":"ContainerStarted","Data":"600a6b63525296d8aed44d6ca36dc467fcbfd3ad8151fccf71cf57423d6431eb"} Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.297028 4916 kubelet.go:2453] "SyncLoop (PLEG): event 
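
The W-level "Failed to process watch event ... Status 404" entries above are cAdvisor racing container creation: the cgroup directory appears before the runtime can answer for the container ID. They are benign whenever the same ID later shows up in a PLEG ContainerStarted event, as caa4f7af... does for placement-db-sync-xct2g. A sketch that cross-checks the two, assuming both patterns keep the 64-hex-character IDs shown here; benign_watch_races is a made-up name.

```python
import re

WATCH404 = re.compile(r'Error finding container ([0-9a-f]{64})')
PLEG_STARTED = re.compile(r'"ContainerStarted","Data":"([0-9a-f]{64})"')

def benign_watch_races(lines):
    """Container IDs that hit a cAdvisor 404 but later show a PLEG
    ContainerStarted event -- the usual create race, not a real failure."""
    missing, started = set(), set()
    for ln in lines:
        missing.update(WATCH404.findall(ln))
        started.update(PLEG_STARTED.findall(ln))
    return missing & started
```
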
for pod" pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" event={"ID":"56003ff0-c41d-4532-9091-744aebf1d53a","Type":"ContainerStarted","Data":"fe74264445acae2e898f5b8af4ebf20f23296090b0ff90b36cee0ae4fb2f249b"} Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.298263 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xct2g" event={"ID":"2377ccfa-eef8-4809-993d-28cf0320206a","Type":"ContainerStarted","Data":"caa4f7afc503c792c695d3cdc78be6131aefd4e7d22c0b504129514b9045b36e"} Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.299366 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-mvxxd" event={"ID":"64ae3277-4d93-4a36-ba5a-9913bb3e58d7","Type":"ContainerStarted","Data":"18c8327bb634ff7f7b2f1abebbfdefabc4de1a17ce8641d745ec2c2c73a01957"} Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.300545 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-lhffz" event={"ID":"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c","Type":"ContainerStarted","Data":"b826497f95faa5888f6e2656fc2800e171d6bf716e599dcdfe29bc9834138ee9"} Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.356755 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.717924 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 19:48:56 crc kubenswrapper[4916]: I1203 19:48:56.926250 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.251257 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-gd75g"] Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.263313 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.338782 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-q6cjh"] Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.340989 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" event={"ID":"ecf17f0e-cc04-4896-b816-a045e729e980","Type":"ContainerStarted","Data":"561d46ba13ecf638c5f349704de15f31f3d6dd41e29f76c1b4be064c9f76cf11"} Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.350624 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nqfmc" event={"ID":"2176d2ca-1321-4345-a377-c4dc42dcc1a1","Type":"ContainerStarted","Data":"47dd291dcb1707e985089dd6fe1ea10400031631de2e1f3c975f74b6ddf2d368"} Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.353226 4916 generic.go:334] "Generic (PLEG): container finished" podID="56003ff0-c41d-4532-9091-744aebf1d53a" containerID="1f1e063ec31f8e535bbe9dd517e4cb61188c5cff194d12b8b7136d54aaed4155" exitCode=0 Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.353385 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" event={"ID":"56003ff0-c41d-4532-9091-744aebf1d53a","Type":"ContainerDied","Data":"1f1e063ec31f8e535bbe9dd517e4cb61188c5cff194d12b8b7136d54aaed4155"} Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.357185 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-lhffz" 
event={"ID":"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c","Type":"ContainerStarted","Data":"84f1626ea5cf3e3cbd1d94f33ef8c72b5c7d1c136b59a64cdf8764c511f3ea9d"} Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.375115 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-nqfmc" podStartSLOduration=3.375101101 podStartE2EDuration="3.375101101s" podCreationTimestamp="2025-12-03 19:48:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:57.369621575 +0000 UTC m=+1153.332431841" watchObservedRunningTime="2025-12-03 19:48:57.375101101 +0000 UTC m=+1153.337911367" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.417225 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-lhffz" podStartSLOduration=3.417209463 podStartE2EDuration="3.417209463s" podCreationTimestamp="2025-12-03 19:48:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:57.403784806 +0000 UTC m=+1153.366595072" watchObservedRunningTime="2025-12-03 19:48:57.417209463 +0000 UTC m=+1153.380019719" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.581137 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 19:48:57 crc kubenswrapper[4916]: W1203 19:48:57.622785 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5068ad8_5ba9_4258_ae1e_6f1532a9b464.slice/crio-f4dce6c24718d6089b3f27e5fd189786f6fad4efe9b9f9b58db599c47effc742 WatchSource:0}: Error finding container f4dce6c24718d6089b3f27e5fd189786f6fad4efe9b9f9b58db599c47effc742: Status 404 returned error can't find the container with id f4dce6c24718d6089b3f27e5fd189786f6fad4efe9b9f9b58db599c47effc742 Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.686591 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.824115 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-swift-storage-0\") pod \"56003ff0-c41d-4532-9091-744aebf1d53a\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.824326 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p9rg\" (UniqueName: \"kubernetes.io/projected/56003ff0-c41d-4532-9091-744aebf1d53a-kube-api-access-5p9rg\") pod \"56003ff0-c41d-4532-9091-744aebf1d53a\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.824380 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-nb\") pod \"56003ff0-c41d-4532-9091-744aebf1d53a\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.824459 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-config\") pod \"56003ff0-c41d-4532-9091-744aebf1d53a\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.824662 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-sb\") pod \"56003ff0-c41d-4532-9091-744aebf1d53a\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.824708 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-svc\") pod \"56003ff0-c41d-4532-9091-744aebf1d53a\" (UID: \"56003ff0-c41d-4532-9091-744aebf1d53a\") " Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.831828 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56003ff0-c41d-4532-9091-744aebf1d53a-kube-api-access-5p9rg" (OuterVolumeSpecName: "kube-api-access-5p9rg") pod "56003ff0-c41d-4532-9091-744aebf1d53a" (UID: "56003ff0-c41d-4532-9091-744aebf1d53a"). InnerVolumeSpecName "kube-api-access-5p9rg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.870053 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-config" (OuterVolumeSpecName: "config") pod "56003ff0-c41d-4532-9091-744aebf1d53a" (UID: "56003ff0-c41d-4532-9091-744aebf1d53a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.870227 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "56003ff0-c41d-4532-9091-744aebf1d53a" (UID: "56003ff0-c41d-4532-9091-744aebf1d53a"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.870312 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "56003ff0-c41d-4532-9091-744aebf1d53a" (UID: "56003ff0-c41d-4532-9091-744aebf1d53a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.870479 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "56003ff0-c41d-4532-9091-744aebf1d53a" (UID: "56003ff0-c41d-4532-9091-744aebf1d53a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.871366 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "56003ff0-c41d-4532-9091-744aebf1d53a" (UID: "56003ff0-c41d-4532-9091-744aebf1d53a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.927801 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p9rg\" (UniqueName: \"kubernetes.io/projected/56003ff0-c41d-4532-9091-744aebf1d53a-kube-api-access-5p9rg\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.927841 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.927852 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.927864 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.927875 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:57 crc kubenswrapper[4916]: I1203 19:48:57.927883 4916 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/56003ff0-c41d-4532-9091-744aebf1d53a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 19:48:58 crc kubenswrapper[4916]: I1203 19:48:58.382503 4916 generic.go:334] "Generic (PLEG): container finished" podID="ecf17f0e-cc04-4896-b816-a045e729e980" containerID="aa1de021f75c26fafc46a054f5d541f7131b792fee54762e4c45f9a9c482f0dd" exitCode=0 Dec 03 19:48:58 crc kubenswrapper[4916]: I1203 19:48:58.383557 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" event={"ID":"ecf17f0e-cc04-4896-b816-a045e729e980","Type":"ContainerDied","Data":"aa1de021f75c26fafc46a054f5d541f7131b792fee54762e4c45f9a9c482f0dd"} Dec 03 
19:48:58 crc kubenswrapper[4916]: I1203 19:48:58.387646 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" event={"ID":"56003ff0-c41d-4532-9091-744aebf1d53a","Type":"ContainerDied","Data":"fe74264445acae2e898f5b8af4ebf20f23296090b0ff90b36cee0ae4fb2f249b"} Dec 03 19:48:58 crc kubenswrapper[4916]: I1203 19:48:58.387700 4916 scope.go:117] "RemoveContainer" containerID="1f1e063ec31f8e535bbe9dd517e4cb61188c5cff194d12b8b7136d54aaed4155" Dec 03 19:48:58 crc kubenswrapper[4916]: I1203 19:48:58.387662 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-8dn5k" Dec 03 19:48:58 crc kubenswrapper[4916]: I1203 19:48:58.389303 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-q6cjh" event={"ID":"0d5347c1-1439-4284-977d-390912ffe9a5","Type":"ContainerStarted","Data":"00c1854f62560940e96e27e1043cd769c0e5a04dee7e53b78ede988f64f46b96"} Dec 03 19:48:58 crc kubenswrapper[4916]: I1203 19:48:58.391087 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:48:58 crc kubenswrapper[4916]: I1203 19:48:58.392301 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a5068ad8-5ba9-4258-ae1e-6f1532a9b464","Type":"ContainerStarted","Data":"f4dce6c24718d6089b3f27e5fd189786f6fad4efe9b9f9b58db599c47effc742"} Dec 03 19:48:58 crc kubenswrapper[4916]: W1203 19:48:58.466219 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod867dcce9_cc32_4525_a877_53e6da57a995.slice/crio-ebb597b1189fbc144ef07f235497c917254556532115efb13772dba489f37efc WatchSource:0}: Error finding container ebb597b1189fbc144ef07f235497c917254556532115efb13772dba489f37efc: Status 404 returned error can't find the container with id ebb597b1189fbc144ef07f235497c917254556532115efb13772dba489f37efc Dec 03 19:48:58 crc kubenswrapper[4916]: I1203 19:48:58.572608 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-8dn5k"] Dec 03 19:48:58 crc kubenswrapper[4916]: I1203 19:48:58.578886 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-8dn5k"] Dec 03 19:48:59 crc kubenswrapper[4916]: I1203 19:48:59.410894 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a5068ad8-5ba9-4258-ae1e-6f1532a9b464","Type":"ContainerStarted","Data":"f11e96586a415d9f69bcfb959bb056323dd17a490a3258830953f856292d1c5c"} Dec 03 19:48:59 crc kubenswrapper[4916]: I1203 19:48:59.411393 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a5068ad8-5ba9-4258-ae1e-6f1532a9b464","Type":"ContainerStarted","Data":"d98930d213f9e0b55f4a5d72cfd838f2df4ca8d296942189e714524026f0eea9"} Dec 03 19:48:59 crc kubenswrapper[4916]: I1203 19:48:59.410972 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a5068ad8-5ba9-4258-ae1e-6f1532a9b464" containerName="glance-log" containerID="cri-o://d98930d213f9e0b55f4a5d72cfd838f2df4ca8d296942189e714524026f0eea9" gracePeriod=30 Dec 03 19:48:59 crc kubenswrapper[4916]: I1203 19:48:59.411505 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a5068ad8-5ba9-4258-ae1e-6f1532a9b464" 
Dec 03 19:48:59 crc kubenswrapper[4916]: I1203 19:48:59.414088 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"867dcce9-cc32-4525-a877-53e6da57a995","Type":"ContainerStarted","Data":"53936c0c5f4f229ab165f895bf54c1279c2f7f0bdbc85a2166a02d7c0765302f"}
Dec 03 19:48:59 crc kubenswrapper[4916]: I1203 19:48:59.414118 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"867dcce9-cc32-4525-a877-53e6da57a995","Type":"ContainerStarted","Data":"ebb597b1189fbc144ef07f235497c917254556532115efb13772dba489f37efc"}
Dec 03 19:48:59 crc kubenswrapper[4916]: I1203 19:48:59.423842 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" event={"ID":"ecf17f0e-cc04-4896-b816-a045e729e980","Type":"ContainerStarted","Data":"0deeb30e4842d919816dae1ff103c485be5c52c50762213299185f14b3ee23d6"}
Dec 03 19:48:59 crc kubenswrapper[4916]: I1203 19:48:59.424339 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g"
Dec 03 19:48:59 crc kubenswrapper[4916]: I1203 19:48:59.446603 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.446586613 podStartE2EDuration="5.446586613s" podCreationTimestamp="2025-12-03 19:48:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:59.433862704 +0000 UTC m=+1155.396672970" watchObservedRunningTime="2025-12-03 19:48:59.446586613 +0000 UTC m=+1155.409396879"
Dec 03 19:48:59 crc kubenswrapper[4916]: I1203 19:48:59.474976 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" podStartSLOduration=5.474952229 podStartE2EDuration="5.474952229s" podCreationTimestamp="2025-12-03 19:48:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:48:59.457986557 +0000 UTC m=+1155.420796823" watchObservedRunningTime="2025-12-03 19:48:59.474952229 +0000 UTC m=+1155.437762495"
Dec 03 19:49:00 crc kubenswrapper[4916]: I1203 19:49:00.435100 4916 generic.go:334] "Generic (PLEG): container finished" podID="a5068ad8-5ba9-4258-ae1e-6f1532a9b464" containerID="f11e96586a415d9f69bcfb959bb056323dd17a490a3258830953f856292d1c5c" exitCode=143
Dec 03 19:49:00 crc kubenswrapper[4916]: I1203 19:49:00.435458 4916 generic.go:334] "Generic (PLEG): container finished" podID="a5068ad8-5ba9-4258-ae1e-6f1532a9b464" containerID="d98930d213f9e0b55f4a5d72cfd838f2df4ca8d296942189e714524026f0eea9" exitCode=143
Dec 03 19:49:00 crc kubenswrapper[4916]: I1203 19:49:00.435153 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a5068ad8-5ba9-4258-ae1e-6f1532a9b464","Type":"ContainerDied","Data":"f11e96586a415d9f69bcfb959bb056323dd17a490a3258830953f856292d1c5c"}
Dec 03 19:49:00 crc kubenswrapper[4916]: I1203 19:49:00.435528 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a5068ad8-5ba9-4258-ae1e-6f1532a9b464","Type":"ContainerDied","Data":"d98930d213f9e0b55f4a5d72cfd838f2df4ca8d296942189e714524026f0eea9"}
Dec 03 19:49:00 crc kubenswrapper[4916]: I1203 19:49:00.491590 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56003ff0-c41d-4532-9091-744aebf1d53a" path="/var/lib/kubelet/pods/56003ff0-c41d-4532-9091-744aebf1d53a/volumes"
Dec 03 19:49:01 crc kubenswrapper[4916]: I1203 19:49:01.450425 4916 generic.go:334] "Generic (PLEG): container finished" podID="2176d2ca-1321-4345-a377-c4dc42dcc1a1" containerID="47dd291dcb1707e985089dd6fe1ea10400031631de2e1f3c975f74b6ddf2d368" exitCode=0
Dec 03 19:49:01 crc kubenswrapper[4916]: I1203 19:49:01.450691 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nqfmc" event={"ID":"2176d2ca-1321-4345-a377-c4dc42dcc1a1","Type":"ContainerDied","Data":"47dd291dcb1707e985089dd6fe1ea10400031631de2e1f3c975f74b6ddf2d368"}
Dec 03 19:49:05 crc kubenswrapper[4916]: I1203 19:49:05.444815 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g"
Dec 03 19:49:05 crc kubenswrapper[4916]: I1203 19:49:05.539233 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jtfbw"]
Dec 03 19:49:05 crc kubenswrapper[4916]: I1203 19:49:05.539867 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerName="dnsmasq-dns" containerID="cri-o://250f60d34758f570cddca0f5143b1c42ac04c018bc251bbddd6f7a39d6bd4506" gracePeriod=10
Dec 03 19:49:05 crc kubenswrapper[4916]: I1203 19:49:05.806532 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: connect: connection refused"
Dec 03 19:49:07 crc kubenswrapper[4916]: I1203 19:49:07.515970 4916 generic.go:334] "Generic (PLEG): container finished" podID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerID="250f60d34758f570cddca0f5143b1c42ac04c018bc251bbddd6f7a39d6bd4506" exitCode=0
Dec 03 19:49:07 crc kubenswrapper[4916]: I1203 19:49:07.516087 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" event={"ID":"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6","Type":"ContainerDied","Data":"250f60d34758f570cddca0f5143b1c42ac04c018bc251bbddd6f7a39d6bd4506"}
Dec 03 19:49:10 crc kubenswrapper[4916]: I1203 19:49:10.806335 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: connect: connection refused"
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.628887 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nqfmc"
Dec 03 19:49:12 crc kubenswrapper[4916]: E1203 19:49:12.691959 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified"
Dec 03 19:49:12 crc kubenswrapper[4916]: E1203 19:49:12.692160 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:heat-db-sync,Image:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,Command:[/bin/bash],Args:[-c /usr/bin/heat-manage --config-dir /etc/heat/heat.conf.d db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/heat/heat.conf.d/00-default.conf,SubPath:00-default.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/heat/heat.conf.d/01-custom.conf,SubPath:01-custom.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p9ktm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42418,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42418,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-db-sync-mvxxd_openstack(64ae3277-4d93-4a36-ba5a-9913bb3e58d7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 03 19:49:12 crc kubenswrapper[4916]: E1203 19:49:12.693379 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/heat-db-sync-mvxxd" podUID="64ae3277-4d93-4a36-ba5a-9913bb3e58d7"
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.754634 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-scripts\") pod \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") "
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.754677 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-credential-keys\") pod \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") "
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.754758 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-fernet-keys\") pod \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") "
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.754927 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-config-data\") pod \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") "
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.754954 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-combined-ca-bundle\") pod \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") "
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.754977 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-764p9\" (UniqueName: \"kubernetes.io/projected/2176d2ca-1321-4345-a377-c4dc42dcc1a1-kube-api-access-764p9\") pod \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\" (UID: \"2176d2ca-1321-4345-a377-c4dc42dcc1a1\") "
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.762729 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "2176d2ca-1321-4345-a377-c4dc42dcc1a1" (UID: "2176d2ca-1321-4345-a377-c4dc42dcc1a1"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.763122 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "2176d2ca-1321-4345-a377-c4dc42dcc1a1" (UID: "2176d2ca-1321-4345-a377-c4dc42dcc1a1"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.763250 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-scripts" (OuterVolumeSpecName: "scripts") pod "2176d2ca-1321-4345-a377-c4dc42dcc1a1" (UID: "2176d2ca-1321-4345-a377-c4dc42dcc1a1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.770815 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2176d2ca-1321-4345-a377-c4dc42dcc1a1-kube-api-access-764p9" (OuterVolumeSpecName: "kube-api-access-764p9") pod "2176d2ca-1321-4345-a377-c4dc42dcc1a1" (UID: "2176d2ca-1321-4345-a377-c4dc42dcc1a1"). InnerVolumeSpecName "kube-api-access-764p9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.792461 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-config-data" (OuterVolumeSpecName: "config-data") pod "2176d2ca-1321-4345-a377-c4dc42dcc1a1" (UID: "2176d2ca-1321-4345-a377-c4dc42dcc1a1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.793105 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2176d2ca-1321-4345-a377-c4dc42dcc1a1" (UID: "2176d2ca-1321-4345-a377-c4dc42dcc1a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.857345 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.857386 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.857399 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-764p9\" (UniqueName: \"kubernetes.io/projected/2176d2ca-1321-4345-a377-c4dc42dcc1a1-kube-api-access-764p9\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.857411 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.857422 4916 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:12 crc kubenswrapper[4916]: I1203 19:49:12.857431 4916 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2176d2ca-1321-4345-a377-c4dc42dcc1a1-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.576085 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nqfmc"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.576085 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nqfmc" event={"ID":"2176d2ca-1321-4345-a377-c4dc42dcc1a1","Type":"ContainerDied","Data":"600a6b63525296d8aed44d6ca36dc467fcbfd3ad8151fccf71cf57423d6431eb"}
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.576540 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="600a6b63525296d8aed44d6ca36dc467fcbfd3ad8151fccf71cf57423d6431eb"
Dec 03 19:49:13 crc kubenswrapper[4916]: E1203 19:49:13.578208 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified\\\"\"" pod="openstack/heat-db-sync-mvxxd" podUID="64ae3277-4d93-4a36-ba5a-9913bb3e58d7"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.735705 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-nqfmc"]
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.749469 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-nqfmc"]
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.833054 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-j8v5b"]
Dec 03 19:49:13 crc kubenswrapper[4916]: E1203 19:49:13.833772 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56003ff0-c41d-4532-9091-744aebf1d53a" containerName="init"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.833871 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="56003ff0-c41d-4532-9091-744aebf1d53a" containerName="init"
Dec 03 19:49:13 crc kubenswrapper[4916]: E1203 19:49:13.833966 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2176d2ca-1321-4345-a377-c4dc42dcc1a1" containerName="keystone-bootstrap"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.834026 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="2176d2ca-1321-4345-a377-c4dc42dcc1a1" containerName="keystone-bootstrap"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.834259 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="56003ff0-c41d-4532-9091-744aebf1d53a" containerName="init"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.834348 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="2176d2ca-1321-4345-a377-c4dc42dcc1a1" containerName="keystone-bootstrap"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.835107 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.837847 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.837978 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.837847 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hvlw8"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.838337 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.843453 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.851913 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-j8v5b"]
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.875190 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-fernet-keys\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.875433 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-config-data\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.875604 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-combined-ca-bundle\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.875727 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-scripts\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.875838 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbmg4\" (UniqueName: \"kubernetes.io/projected/4a166793-92a9-4ad3-95cb-1743fe20d361-kube-api-access-lbmg4\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.875995 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-credential-keys\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.977823 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-credential-keys\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.977884 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-fernet-keys\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.977932 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-config-data\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.978017 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-combined-ca-bundle\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.978066 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-scripts\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.978097 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbmg4\" (UniqueName: \"kubernetes.io/projected/4a166793-92a9-4ad3-95cb-1743fe20d361-kube-api-access-lbmg4\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.982532 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-scripts\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.983078 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-fernet-keys\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.983132 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-credential-keys\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.983524 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-combined-ca-bundle\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.984702 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-config-data\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:13 crc kubenswrapper[4916]: I1203 19:49:13.998388 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbmg4\" (UniqueName: \"kubernetes.io/projected/4a166793-92a9-4ad3-95cb-1743fe20d361-kube-api-access-lbmg4\") pod \"keystone-bootstrap-j8v5b\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:14 crc kubenswrapper[4916]: I1203 19:49:14.167422 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-j8v5b"
Dec 03 19:49:14 crc kubenswrapper[4916]: I1203 19:49:14.488790 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2176d2ca-1321-4345-a377-c4dc42dcc1a1" path="/var/lib/kubelet/pods/2176d2ca-1321-4345-a377-c4dc42dcc1a1/volumes"
Dec 03 19:49:15 crc kubenswrapper[4916]: I1203 19:49:15.806109 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: connect: connection refused"
Dec 03 19:49:15 crc kubenswrapper[4916]: I1203 19:49:15.806350 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw"
Dec 03 19:49:20 crc kubenswrapper[4916]: I1203 19:49:20.806267 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: connect: connection refused"
Dec 03 19:49:25 crc kubenswrapper[4916]: I1203 19:49:25.806231 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.127:5353: connect: connection refused"
Dec 03 19:49:26 crc kubenswrapper[4916]: I1203 19:49:26.003175 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 03 19:49:26 crc kubenswrapper[4916]: I1203 19:49:26.003220 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.106811 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw"
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.175143 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-nb\") pod \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") "
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.175238 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-svc\") pod \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") "
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.175296 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-sb\") pod \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") "
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.175375 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpsqz\" (UniqueName: \"kubernetes.io/projected/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-kube-api-access-bpsqz\") pod \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") "
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.175504 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-config\") pod \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") "
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.175546 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-swift-storage-0\") pod \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\" (UID: \"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6\") "
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.182256 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-kube-api-access-bpsqz" (OuterVolumeSpecName: "kube-api-access-bpsqz") pod "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" (UID: "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6"). InnerVolumeSpecName "kube-api-access-bpsqz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.232302 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-config" (OuterVolumeSpecName: "config") pod "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" (UID: "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.234007 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" (UID: "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.240548 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" (UID: "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.250479 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" (UID: "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:49:27 crc kubenswrapper[4916]: E1203 19:49:27.251091 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified"
Dec 03 19:49:27 crc kubenswrapper[4916]: E1203 19:49:27.251249 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nd5h598h666h58ch77h679h5fh555h66ch667h65dhc5h5b6h584h58ch698h5cdh54ch549h676hcbhbfh98hd9h678h679h579h568h79h667h59ch5c4q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v9nmj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(fcf233ee-1271-4a7a-9fcb-5a7725746aa2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.262076 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" (UID: "e8fb5ee1-e661-4656-94c6-2dcac6ce93b6"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.278112 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-config\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.278141 4916 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.278153 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.278161 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.278169 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.278177 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpsqz\" (UniqueName: \"kubernetes.io/projected/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6-kube-api-access-bpsqz\") on node \"crc\" DevicePath \"\""
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.717613 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw" event={"ID":"e8fb5ee1-e661-4656-94c6-2dcac6ce93b6","Type":"ContainerDied","Data":"fd9e312195bfbeee4a7090dbc6a955453163e6acd1ba7c400808ecb513ad8c18"}
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.717683 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74f6bcbc87-jtfbw"
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.717726 4916 scope.go:117] "RemoveContainer" containerID="250f60d34758f570cddca0f5143b1c42ac04c018bc251bbddd6f7a39d6bd4506"
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.771122 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jtfbw"]
Dec 03 19:49:27 crc kubenswrapper[4916]: I1203 19:49:27.784685 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74f6bcbc87-jtfbw"]
Dec 03 19:49:28 crc kubenswrapper[4916]: E1203 19:49:28.300808 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified"
Dec 03 19:49:28 crc kubenswrapper[4916]: E1203 19:49:28.301798 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-skwrb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-q6cjh_openstack(0d5347c1-1439-4284-977d-390912ffe9a5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 03 19:49:28 crc kubenswrapper[4916]: E1203 19:49:28.303035 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-q6cjh" podUID="0d5347c1-1439-4284-977d-390912ffe9a5"
Dec 03 19:49:28 crc kubenswrapper[4916]: I1203 19:49:28.489347 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" path="/var/lib/kubelet/pods/e8fb5ee1-e661-4656-94c6-2dcac6ce93b6/volumes"
Dec 03 19:49:28 crc kubenswrapper[4916]: E1203 19:49:28.730994 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-q6cjh" podUID="0d5347c1-1439-4284-977d-390912ffe9a5"
Dec 03 19:49:29 crc kubenswrapper[4916]: E1203 19:49:29.172628 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified"
Dec 03 19:49:29 crc kubenswrapper[4916]: E1203 19:49:29.173108 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4vlpl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-l4sh2_openstack(7eac938e-d147-4214-a0b1-4a17ac69b649): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.173469 4916 scope.go:117] "RemoveContainer" containerID="a02769c5632c765d76c1ed1f73faf7b1ed75c71fe378ab81a2a7a4b906b55fcc"
Dec 03 19:49:29 crc kubenswrapper[4916]: E1203 19:49:29.175461 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-l4sh2" podUID="7eac938e-d147-4214-a0b1-4a17ac69b649"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.197686 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.321709 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-combined-ca-bundle\") pod \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") "
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.322020 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-public-tls-certs\") pod \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") "
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.322046 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") "
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.322089 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-httpd-run\") pod \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") "
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.322111 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-config-data\") pod \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") "
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.322159 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wclk\" (UniqueName: \"kubernetes.io/projected/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-kube-api-access-6wclk\") pod \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") "
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.322207 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-scripts\") pod \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") "
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.322343 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-logs\") pod \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\" (UID: \"a5068ad8-5ba9-4258-ae1e-6f1532a9b464\") "
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.323174 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-logs" (OuterVolumeSpecName: "logs") pod "a5068ad8-5ba9-4258-ae1e-6f1532a9b464" (UID: "a5068ad8-5ba9-4258-ae1e-6f1532a9b464"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.323391 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a5068ad8-5ba9-4258-ae1e-6f1532a9b464" (UID: "a5068ad8-5ba9-4258-ae1e-6f1532a9b464"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.335812 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-scripts" (OuterVolumeSpecName: "scripts") pod "a5068ad8-5ba9-4258-ae1e-6f1532a9b464" (UID: "a5068ad8-5ba9-4258-ae1e-6f1532a9b464"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.335928 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-kube-api-access-6wclk" (OuterVolumeSpecName: "kube-api-access-6wclk") pod "a5068ad8-5ba9-4258-ae1e-6f1532a9b464" (UID: "a5068ad8-5ba9-4258-ae1e-6f1532a9b464"). InnerVolumeSpecName "kube-api-access-6wclk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.338967 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "a5068ad8-5ba9-4258-ae1e-6f1532a9b464" (UID: "a5068ad8-5ba9-4258-ae1e-6f1532a9b464"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.362980 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5068ad8-5ba9-4258-ae1e-6f1532a9b464" (UID: "a5068ad8-5ba9-4258-ae1e-6f1532a9b464"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.413407 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-config-data" (OuterVolumeSpecName: "config-data") pod "a5068ad8-5ba9-4258-ae1e-6f1532a9b464" (UID: "a5068ad8-5ba9-4258-ae1e-6f1532a9b464"). InnerVolumeSpecName "config-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.424586 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-logs\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.424668 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.424805 4916 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.429325 4916 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.429342 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.429351 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wclk\" (UniqueName: \"kubernetes.io/projected/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-kube-api-access-6wclk\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.429358 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.446176 4916 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.448649 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a5068ad8-5ba9-4258-ae1e-6f1532a9b464" (UID: "a5068ad8-5ba9-4258-ae1e-6f1532a9b464"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.530908 4916 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5068ad8-5ba9-4258-ae1e-6f1532a9b464-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.531395 4916 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.642894 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-j8v5b"] Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.737619 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-mvxxd" event={"ID":"64ae3277-4d93-4a36-ba5a-9913bb3e58d7","Type":"ContainerStarted","Data":"c140bef391449e52f103aefaf68bfcfdab5c883b0a860a104a7e44334a63f96e"} Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.739983 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a5068ad8-5ba9-4258-ae1e-6f1532a9b464","Type":"ContainerDied","Data":"f4dce6c24718d6089b3f27e5fd189786f6fad4efe9b9f9b58db599c47effc742"} Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.740016 4916 scope.go:117] "RemoveContainer" containerID="f11e96586a415d9f69bcfb959bb056323dd17a490a3258830953f856292d1c5c" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.740073 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.750418 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xct2g" event={"ID":"2377ccfa-eef8-4809-993d-28cf0320206a","Type":"ContainerStarted","Data":"16855b2506ae4e533e78fe358d7fb9eb2644242928a53b6ae75b020d6f6aff58"} Dec 03 19:49:29 crc kubenswrapper[4916]: E1203 19:49:29.751007 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-l4sh2" podUID="7eac938e-d147-4214-a0b1-4a17ac69b649" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.756431 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-mvxxd" podStartSLOduration=2.739215712 podStartE2EDuration="35.756417898s" podCreationTimestamp="2025-12-03 19:48:54 +0000 UTC" firstStartedPulling="2025-12-03 19:48:56.225626783 +0000 UTC m=+1152.188437049" lastFinishedPulling="2025-12-03 19:49:29.242828969 +0000 UTC m=+1185.205639235" observedRunningTime="2025-12-03 19:49:29.755823912 +0000 UTC m=+1185.718634178" watchObservedRunningTime="2025-12-03 19:49:29.756417898 +0000 UTC m=+1185.719228164" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.788294 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-xct2g" podStartSLOduration=3.974931689 podStartE2EDuration="35.788280377s" podCreationTimestamp="2025-12-03 19:48:54 +0000 UTC" firstStartedPulling="2025-12-03 19:48:56.150025778 +0000 UTC m=+1152.112836034" lastFinishedPulling="2025-12-03 19:49:27.963374456 +0000 UTC m=+1183.926184722" observedRunningTime="2025-12-03 19:49:29.787451745 +0000 UTC 
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.788294 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-xct2g" podStartSLOduration=3.974931689 podStartE2EDuration="35.788280377s" podCreationTimestamp="2025-12-03 19:48:54 +0000 UTC" firstStartedPulling="2025-12-03 19:48:56.150025778 +0000 UTC m=+1152.112836034" lastFinishedPulling="2025-12-03 19:49:27.963374456 +0000 UTC m=+1183.926184722" observedRunningTime="2025-12-03 19:49:29.787451745 +0000 UTC m=+1185.750262011" watchObservedRunningTime="2025-12-03 19:49:29.788280377 +0000 UTC m=+1185.751090643"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.810585 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.827048 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.841892 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 19:49:29 crc kubenswrapper[4916]: E1203 19:49:29.842224 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5068ad8-5ba9-4258-ae1e-6f1532a9b464" containerName="glance-log"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.842240 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5068ad8-5ba9-4258-ae1e-6f1532a9b464" containerName="glance-log"
Dec 03 19:49:29 crc kubenswrapper[4916]: E1203 19:49:29.842250 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerName="init"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.842258 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerName="init"
Dec 03 19:49:29 crc kubenswrapper[4916]: E1203 19:49:29.842270 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerName="dnsmasq-dns"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.842277 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerName="dnsmasq-dns"
Dec 03 19:49:29 crc kubenswrapper[4916]: E1203 19:49:29.842295 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5068ad8-5ba9-4258-ae1e-6f1532a9b464" containerName="glance-httpd"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.842301 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5068ad8-5ba9-4258-ae1e-6f1532a9b464" containerName="glance-httpd"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.842461 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8fb5ee1-e661-4656-94c6-2dcac6ce93b6" containerName="dnsmasq-dns"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.842477 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5068ad8-5ba9-4258-ae1e-6f1532a9b464" containerName="glance-httpd"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.842486 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5068ad8-5ba9-4258-ae1e-6f1532a9b464" containerName="glance-log"
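
[annotation] The RemoveStaleState records above fire because glance-default-external-api-0 was deleted and immediately re-added under a new UID, so the CPU and memory managers purge per-container assignments that still reference UIDs no longer present on the node (a5068ad8-... for the old glance pod, e8fb5ee1-... for the earlier dnsmasq pod). The sketch below only illustrates that pruning pattern; it is NOT kubelet's actual implementation, and the types and names are hypothetical:

    package main

    import "fmt"

    // key is a hypothetical (pod UID, container name) index into per-container
    // resource assignments, mirroring the pattern in the records above.
    type key struct{ podUID, container string }

    // removeStaleState drops assignments whose pod UID is no longer active.
    func removeStaleState(assignments map[key]string, activePods map[string]bool) {
        for k := range assignments { // deleting during range is safe in Go
            if !activePods[k.podUID] {
                fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
                    k.podUID, k.container)
                delete(assignments, k)
            }
        }
    }

    func main() {
        // Old UID from the deleted pod vs. the new UID from the re-added one.
        assignments := map[key]string{
            {"a5068ad8-5ba9-4258-ae1e-6f1532a9b464", "glance-log"}:   "cpus 0-1",
            {"a5068ad8-5ba9-4258-ae1e-6f1532a9b464", "glance-httpd"}: "cpus 2-3",
            {"4fe190f9-3a33-4b45-809e-1bbff64ab3fb", "glance-log"}:   "cpus 4-5",
        }
        activePods := map[string]bool{"4fe190f9-3a33-4b45-809e-1bbff64ab3fb": true}
        removeStaleState(assignments, activePods)
    }
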
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.843313 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.849347 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.849478 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.853280 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 19:49:29 crc kubenswrapper[4916]: W1203 19:49:29.920965 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a166793_92a9_4ad3_95cb_1743fe20d361.slice/crio-35049cc263b99cd259ac7cdaa297937de029e18fcf7836656614e3715de5f78c WatchSource:0}: Error finding container 35049cc263b99cd259ac7cdaa297937de029e18fcf7836656614e3715de5f78c: Status 404 returned error can't find the container with id 35049cc263b99cd259ac7cdaa297937de029e18fcf7836656614e3715de5f78c
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.940203 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-logs\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.940498 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.940540 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.940640 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldnc5\" (UniqueName: \"kubernetes.io/projected/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-kube-api-access-ldnc5\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.940703 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.940793 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " 
pod="openstack/glance-default-external-api-0" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.940897 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-config-data\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.940965 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-scripts\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:29 crc kubenswrapper[4916]: I1203 19:49:29.951426 4916 scope.go:117] "RemoveContainer" containerID="d98930d213f9e0b55f4a5d72cfd838f2df4ca8d296942189e714524026f0eea9" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.042625 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-config-data\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.042711 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-scripts\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.042787 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-logs\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.042831 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.042855 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.042885 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldnc5\" (UniqueName: \"kubernetes.io/projected/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-kube-api-access-ldnc5\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.042906 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.042964 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.043353 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-logs\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.043702 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.047746 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.048691 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.057154 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.059085 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-scripts\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.063601 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-config-data\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.064336 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldnc5\" (UniqueName: \"kubernetes.io/projected/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-kube-api-access-ldnc5\") pod 
\"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.095837 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.167776 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.493824 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5068ad8-5ba9-4258-ae1e-6f1532a9b464" path="/var/lib/kubelet/pods/a5068ad8-5ba9-4258-ae1e-6f1532a9b464/volumes" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.733728 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 19:49:30 crc kubenswrapper[4916]: W1203 19:49:30.738538 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fe190f9_3a33_4b45_809e_1bbff64ab3fb.slice/crio-683deb5e7c7d7e5ed32104cd334949b1fec52decc86df1ba819baf18a9fb524b WatchSource:0}: Error finding container 683deb5e7c7d7e5ed32104cd334949b1fec52decc86df1ba819baf18a9fb524b: Status 404 returned error can't find the container with id 683deb5e7c7d7e5ed32104cd334949b1fec52decc86df1ba819baf18a9fb524b Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.762399 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"867dcce9-cc32-4525-a877-53e6da57a995","Type":"ContainerStarted","Data":"7ac500ee704aac84f1ea37477f98760f62e9cb0d8df0d83de4abc7983cffd3a1"} Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.762537 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="867dcce9-cc32-4525-a877-53e6da57a995" containerName="glance-log" containerID="cri-o://53936c0c5f4f229ab165f895bf54c1279c2f7f0bdbc85a2166a02d7c0765302f" gracePeriod=30 Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.762996 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="867dcce9-cc32-4525-a877-53e6da57a995" containerName="glance-httpd" containerID="cri-o://7ac500ee704aac84f1ea37477f98760f62e9cb0d8df0d83de4abc7983cffd3a1" gracePeriod=30 Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.764270 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4fe190f9-3a33-4b45-809e-1bbff64ab3fb","Type":"ContainerStarted","Data":"683deb5e7c7d7e5ed32104cd334949b1fec52decc86df1ba819baf18a9fb524b"} Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.765527 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fcf233ee-1271-4a7a-9fcb-5a7725746aa2","Type":"ContainerStarted","Data":"94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7"} Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.767627 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-j8v5b" 
event={"ID":"4a166793-92a9-4ad3-95cb-1743fe20d361","Type":"ContainerStarted","Data":"d674940fced82d2aa961795cf7e868b9e49b8545a540155c283aea1156164fb8"} Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.767652 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-j8v5b" event={"ID":"4a166793-92a9-4ad3-95cb-1743fe20d361","Type":"ContainerStarted","Data":"35049cc263b99cd259ac7cdaa297937de029e18fcf7836656614e3715de5f78c"} Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.790581 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=36.790542911 podStartE2EDuration="36.790542911s" podCreationTimestamp="2025-12-03 19:48:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:49:30.784063749 +0000 UTC m=+1186.746874035" watchObservedRunningTime="2025-12-03 19:49:30.790542911 +0000 UTC m=+1186.753353177" Dec 03 19:49:30 crc kubenswrapper[4916]: I1203 19:49:30.840395 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-j8v5b" podStartSLOduration=17.840369999 podStartE2EDuration="17.840369999s" podCreationTimestamp="2025-12-03 19:49:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:49:30.805803088 +0000 UTC m=+1186.768613354" watchObservedRunningTime="2025-12-03 19:49:30.840369999 +0000 UTC m=+1186.803180265" Dec 03 19:49:31 crc kubenswrapper[4916]: I1203 19:49:31.784203 4916 generic.go:334] "Generic (PLEG): container finished" podID="2377ccfa-eef8-4809-993d-28cf0320206a" containerID="16855b2506ae4e533e78fe358d7fb9eb2644242928a53b6ae75b020d6f6aff58" exitCode=0 Dec 03 19:49:31 crc kubenswrapper[4916]: I1203 19:49:31.784315 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xct2g" event={"ID":"2377ccfa-eef8-4809-993d-28cf0320206a","Type":"ContainerDied","Data":"16855b2506ae4e533e78fe358d7fb9eb2644242928a53b6ae75b020d6f6aff58"} Dec 03 19:49:31 crc kubenswrapper[4916]: I1203 19:49:31.794404 4916 generic.go:334] "Generic (PLEG): container finished" podID="867dcce9-cc32-4525-a877-53e6da57a995" containerID="7ac500ee704aac84f1ea37477f98760f62e9cb0d8df0d83de4abc7983cffd3a1" exitCode=0 Dec 03 19:49:31 crc kubenswrapper[4916]: I1203 19:49:31.794450 4916 generic.go:334] "Generic (PLEG): container finished" podID="867dcce9-cc32-4525-a877-53e6da57a995" containerID="53936c0c5f4f229ab165f895bf54c1279c2f7f0bdbc85a2166a02d7c0765302f" exitCode=143 Dec 03 19:49:31 crc kubenswrapper[4916]: I1203 19:49:31.794512 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"867dcce9-cc32-4525-a877-53e6da57a995","Type":"ContainerDied","Data":"7ac500ee704aac84f1ea37477f98760f62e9cb0d8df0d83de4abc7983cffd3a1"} Dec 03 19:49:31 crc kubenswrapper[4916]: I1203 19:49:31.794540 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"867dcce9-cc32-4525-a877-53e6da57a995","Type":"ContainerDied","Data":"53936c0c5f4f229ab165f895bf54c1279c2f7f0bdbc85a2166a02d7c0765302f"} Dec 03 19:49:31 crc kubenswrapper[4916]: I1203 19:49:31.803171 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"4fe190f9-3a33-4b45-809e-1bbff64ab3fb","Type":"ContainerStarted","Data":"3e204a8da2a04a4e00c481245b1b2d3acbb6b5af234256168fe1f97d4922f9df"} Dec 03 19:49:31 crc kubenswrapper[4916]: I1203 19:49:31.902957 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.022981 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"867dcce9-cc32-4525-a877-53e6da57a995\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.023042 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-combined-ca-bundle\") pod \"867dcce9-cc32-4525-a877-53e6da57a995\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.023079 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-httpd-run\") pod \"867dcce9-cc32-4525-a877-53e6da57a995\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.023104 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-logs\") pod \"867dcce9-cc32-4525-a877-53e6da57a995\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.023128 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-config-data\") pod \"867dcce9-cc32-4525-a877-53e6da57a995\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.023220 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-scripts\") pod \"867dcce9-cc32-4525-a877-53e6da57a995\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.023288 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxczl\" (UniqueName: \"kubernetes.io/projected/867dcce9-cc32-4525-a877-53e6da57a995-kube-api-access-dxczl\") pod \"867dcce9-cc32-4525-a877-53e6da57a995\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.023306 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-internal-tls-certs\") pod \"867dcce9-cc32-4525-a877-53e6da57a995\" (UID: \"867dcce9-cc32-4525-a877-53e6da57a995\") " Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.024985 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-logs" (OuterVolumeSpecName: "logs") pod "867dcce9-cc32-4525-a877-53e6da57a995" (UID: "867dcce9-cc32-4525-a877-53e6da57a995"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.028760 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "867dcce9-cc32-4525-a877-53e6da57a995" (UID: "867dcce9-cc32-4525-a877-53e6da57a995"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.029390 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-scripts" (OuterVolumeSpecName: "scripts") pod "867dcce9-cc32-4525-a877-53e6da57a995" (UID: "867dcce9-cc32-4525-a877-53e6da57a995"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.030338 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/867dcce9-cc32-4525-a877-53e6da57a995-kube-api-access-dxczl" (OuterVolumeSpecName: "kube-api-access-dxczl") pod "867dcce9-cc32-4525-a877-53e6da57a995" (UID: "867dcce9-cc32-4525-a877-53e6da57a995"). InnerVolumeSpecName "kube-api-access-dxczl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.031508 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "867dcce9-cc32-4525-a877-53e6da57a995" (UID: "867dcce9-cc32-4525-a877-53e6da57a995"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.063938 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "867dcce9-cc32-4525-a877-53e6da57a995" (UID: "867dcce9-cc32-4525-a877-53e6da57a995"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.074378 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-config-data" (OuterVolumeSpecName: "config-data") pod "867dcce9-cc32-4525-a877-53e6da57a995" (UID: "867dcce9-cc32-4525-a877-53e6da57a995"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.088840 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "867dcce9-cc32-4525-a877-53e6da57a995" (UID: "867dcce9-cc32-4525-a877-53e6da57a995"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.127889 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.127919 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxczl\" (UniqueName: \"kubernetes.io/projected/867dcce9-cc32-4525-a877-53e6da57a995-kube-api-access-dxczl\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.127932 4916 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.127968 4916 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.127978 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.127987 4916 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.127995 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/867dcce9-cc32-4525-a877-53e6da57a995-logs\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.128003 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/867dcce9-cc32-4525-a877-53e6da57a995-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.145116 4916 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.229268 4916 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:32 crc kubenswrapper[4916]: E1203 19:49:32.691116 4916 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod867dcce9_cc32_4525_a877_53e6da57a995.slice\": RecentStats: unable to find data in memory cache]" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.817878 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4fe190f9-3a33-4b45-809e-1bbff64ab3fb","Type":"ContainerStarted","Data":"c78345133e3de8f4802024621da8b100d7e6373e8ee6ed90578fe4f5f58f9fbb"} Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.824946 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.825005 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"867dcce9-cc32-4525-a877-53e6da57a995","Type":"ContainerDied","Data":"ebb597b1189fbc144ef07f235497c917254556532115efb13772dba489f37efc"} Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.825044 4916 scope.go:117] "RemoveContainer" containerID="7ac500ee704aac84f1ea37477f98760f62e9cb0d8df0d83de4abc7983cffd3a1" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.850169 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.8501529469999998 podStartE2EDuration="3.850152947s" podCreationTimestamp="2025-12-03 19:49:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:49:32.848964955 +0000 UTC m=+1188.811775221" watchObservedRunningTime="2025-12-03 19:49:32.850152947 +0000 UTC m=+1188.812963213" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.885063 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.897621 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.903908 4916 scope.go:117] "RemoveContainer" containerID="53936c0c5f4f229ab165f895bf54c1279c2f7f0bdbc85a2166a02d7c0765302f" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.911370 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:49:32 crc kubenswrapper[4916]: E1203 19:49:32.912394 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="867dcce9-cc32-4525-a877-53e6da57a995" containerName="glance-httpd" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.912420 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="867dcce9-cc32-4525-a877-53e6da57a995" containerName="glance-httpd" Dec 03 19:49:32 crc kubenswrapper[4916]: E1203 19:49:32.912451 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="867dcce9-cc32-4525-a877-53e6da57a995" containerName="glance-log" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.912460 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="867dcce9-cc32-4525-a877-53e6da57a995" containerName="glance-log" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.913913 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="867dcce9-cc32-4525-a877-53e6da57a995" containerName="glance-httpd" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.913983 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="867dcce9-cc32-4525-a877-53e6da57a995" containerName="glance-log" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.924739 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.949054 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.951228 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 19:49:32 crc kubenswrapper[4916]: I1203 19:49:32.951437 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.040687 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.040952 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.040986 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.041020 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.041038 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.041057 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-logs\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.041077 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt6ml\" (UniqueName: \"kubernetes.io/projected/8d15f4cc-451e-4898-9125-a2ad4f229e3d-kube-api-access-kt6ml\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.041100 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.142491 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.142542 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.142599 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.142622 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.142637 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-logs\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.142660 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt6ml\" (UniqueName: \"kubernetes.io/projected/8d15f4cc-451e-4898-9125-a2ad4f229e3d-kube-api-access-kt6ml\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.142688 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.142746 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.143708 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.144637 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-logs\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.146853 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.154977 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.155936 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.166135 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.166825 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt6ml\" (UniqueName: \"kubernetes.io/projected/8d15f4cc-451e-4898-9125-a2ad4f229e3d-kube-api-access-kt6ml\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.180269 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.200011 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.288632 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.837768 4916 generic.go:334] "Generic (PLEG): container finished" podID="4a166793-92a9-4ad3-95cb-1743fe20d361" containerID="d674940fced82d2aa961795cf7e868b9e49b8545a540155c283aea1156164fb8" exitCode=0 Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.837848 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-j8v5b" event={"ID":"4a166793-92a9-4ad3-95cb-1743fe20d361","Type":"ContainerDied","Data":"d674940fced82d2aa961795cf7e868b9e49b8545a540155c283aea1156164fb8"} Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.843096 4916 generic.go:334] "Generic (PLEG): container finished" podID="64ae3277-4d93-4a36-ba5a-9913bb3e58d7" containerID="c140bef391449e52f103aefaf68bfcfdab5c883b0a860a104a7e44334a63f96e" exitCode=0 Dec 03 19:49:33 crc kubenswrapper[4916]: I1203 19:49:33.843147 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-mvxxd" event={"ID":"64ae3277-4d93-4a36-ba5a-9913bb3e58d7","Type":"ContainerDied","Data":"c140bef391449e52f103aefaf68bfcfdab5c883b0a860a104a7e44334a63f96e"} Dec 03 19:49:34 crc kubenswrapper[4916]: I1203 19:49:34.492993 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="867dcce9-cc32-4525-a877-53e6da57a995" path="/var/lib/kubelet/pods/867dcce9-cc32-4525-a877-53e6da57a995/volumes" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.556282 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-mvxxd" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.559349 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xct2g" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.566891 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-combined-ca-bundle\") pod \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.566945 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-config-data\") pod \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.566984 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2377ccfa-eef8-4809-993d-28cf0320206a-logs\") pod \"2377ccfa-eef8-4809-993d-28cf0320206a\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.567029 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-config-data\") pod \"2377ccfa-eef8-4809-993d-28cf0320206a\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.567061 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjgg8\" (UniqueName: \"kubernetes.io/projected/2377ccfa-eef8-4809-993d-28cf0320206a-kube-api-access-fjgg8\") pod \"2377ccfa-eef8-4809-993d-28cf0320206a\" (UID: 
\"2377ccfa-eef8-4809-993d-28cf0320206a\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.567110 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-combined-ca-bundle\") pod \"2377ccfa-eef8-4809-993d-28cf0320206a\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.567214 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p9ktm\" (UniqueName: \"kubernetes.io/projected/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-kube-api-access-p9ktm\") pod \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\" (UID: \"64ae3277-4d93-4a36-ba5a-9913bb3e58d7\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.567240 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-scripts\") pod \"2377ccfa-eef8-4809-993d-28cf0320206a\" (UID: \"2377ccfa-eef8-4809-993d-28cf0320206a\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.568011 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-j8v5b" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.574906 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2377ccfa-eef8-4809-993d-28cf0320206a-logs" (OuterVolumeSpecName: "logs") pod "2377ccfa-eef8-4809-993d-28cf0320206a" (UID: "2377ccfa-eef8-4809-993d-28cf0320206a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.581377 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-kube-api-access-p9ktm" (OuterVolumeSpecName: "kube-api-access-p9ktm") pod "64ae3277-4d93-4a36-ba5a-9913bb3e58d7" (UID: "64ae3277-4d93-4a36-ba5a-9913bb3e58d7"). InnerVolumeSpecName "kube-api-access-p9ktm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.582331 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2377ccfa-eef8-4809-993d-28cf0320206a-kube-api-access-fjgg8" (OuterVolumeSpecName: "kube-api-access-fjgg8") pod "2377ccfa-eef8-4809-993d-28cf0320206a" (UID: "2377ccfa-eef8-4809-993d-28cf0320206a"). InnerVolumeSpecName "kube-api-access-fjgg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.584966 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-scripts" (OuterVolumeSpecName: "scripts") pod "2377ccfa-eef8-4809-993d-28cf0320206a" (UID: "2377ccfa-eef8-4809-993d-28cf0320206a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.621064 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64ae3277-4d93-4a36-ba5a-9913bb3e58d7" (UID: "64ae3277-4d93-4a36-ba5a-9913bb3e58d7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.623712 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2377ccfa-eef8-4809-993d-28cf0320206a" (UID: "2377ccfa-eef8-4809-993d-28cf0320206a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.661889 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-config-data" (OuterVolumeSpecName: "config-data") pod "2377ccfa-eef8-4809-993d-28cf0320206a" (UID: "2377ccfa-eef8-4809-993d-28cf0320206a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.668929 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbmg4\" (UniqueName: \"kubernetes.io/projected/4a166793-92a9-4ad3-95cb-1743fe20d361-kube-api-access-lbmg4\") pod \"4a166793-92a9-4ad3-95cb-1743fe20d361\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669003 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-scripts\") pod \"4a166793-92a9-4ad3-95cb-1743fe20d361\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669033 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-config-data\") pod \"4a166793-92a9-4ad3-95cb-1743fe20d361\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669178 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-combined-ca-bundle\") pod \"4a166793-92a9-4ad3-95cb-1743fe20d361\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669219 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-fernet-keys\") pod \"4a166793-92a9-4ad3-95cb-1743fe20d361\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669255 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-credential-keys\") pod \"4a166793-92a9-4ad3-95cb-1743fe20d361\" (UID: \"4a166793-92a9-4ad3-95cb-1743fe20d361\") " Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669581 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p9ktm\" (UniqueName: \"kubernetes.io/projected/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-kube-api-access-p9ktm\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669598 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-scripts\") on node \"crc\" 
DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669607 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669615 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2377ccfa-eef8-4809-993d-28cf0320206a-logs\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669625 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669636 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjgg8\" (UniqueName: \"kubernetes.io/projected/2377ccfa-eef8-4809-993d-28cf0320206a-kube-api-access-fjgg8\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.669644 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2377ccfa-eef8-4809-993d-28cf0320206a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.675897 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4a166793-92a9-4ad3-95cb-1743fe20d361" (UID: "4a166793-92a9-4ad3-95cb-1743fe20d361"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.676069 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a166793-92a9-4ad3-95cb-1743fe20d361-kube-api-access-lbmg4" (OuterVolumeSpecName: "kube-api-access-lbmg4") pod "4a166793-92a9-4ad3-95cb-1743fe20d361" (UID: "4a166793-92a9-4ad3-95cb-1743fe20d361"). InnerVolumeSpecName "kube-api-access-lbmg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.676178 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-scripts" (OuterVolumeSpecName: "scripts") pod "4a166793-92a9-4ad3-95cb-1743fe20d361" (UID: "4a166793-92a9-4ad3-95cb-1743fe20d361"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.676138 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4a166793-92a9-4ad3-95cb-1743fe20d361" (UID: "4a166793-92a9-4ad3-95cb-1743fe20d361"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.707273 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-config-data" (OuterVolumeSpecName: "config-data") pod "4a166793-92a9-4ad3-95cb-1743fe20d361" (UID: "4a166793-92a9-4ad3-95cb-1743fe20d361"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.709787 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a166793-92a9-4ad3-95cb-1743fe20d361" (UID: "4a166793-92a9-4ad3-95cb-1743fe20d361"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.722280 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-config-data" (OuterVolumeSpecName: "config-data") pod "64ae3277-4d93-4a36-ba5a-9913bb3e58d7" (UID: "64ae3277-4d93-4a36-ba5a-9913bb3e58d7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.771028 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.771152 4916 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.771204 4916 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.771252 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64ae3277-4d93-4a36-ba5a-9913bb3e58d7-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.771300 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbmg4\" (UniqueName: \"kubernetes.io/projected/4a166793-92a9-4ad3-95cb-1743fe20d361-kube-api-access-lbmg4\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.771349 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.771405 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a166793-92a9-4ad3-95cb-1743fe20d361-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.917659 4916 util.go:48] "No ready sandbox for pod can be found. 
Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.918610 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-j8v5b" event={"ID":"4a166793-92a9-4ad3-95cb-1743fe20d361","Type":"ContainerDied","Data":"35049cc263b99cd259ac7cdaa297937de029e18fcf7836656614e3715de5f78c"}
Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.918653 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35049cc263b99cd259ac7cdaa297937de029e18fcf7836656614e3715de5f78c"
Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.922914 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-xct2g" event={"ID":"2377ccfa-eef8-4809-993d-28cf0320206a","Type":"ContainerDied","Data":"caa4f7afc503c792c695d3cdc78be6131aefd4e7d22c0b504129514b9045b36e"}
Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.922942 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="caa4f7afc503c792c695d3cdc78be6131aefd4e7d22c0b504129514b9045b36e"
Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.922998 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-xct2g"
Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.931431 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-mvxxd" event={"ID":"64ae3277-4d93-4a36-ba5a-9913bb3e58d7","Type":"ContainerDied","Data":"18c8327bb634ff7f7b2f1abebbfdefabc4de1a17ce8641d745ec2c2c73a01957"}
Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.931644 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18c8327bb634ff7f7b2f1abebbfdefabc4de1a17ce8641d745ec2c2c73a01957"
Dec 03 19:49:37 crc kubenswrapper[4916]: I1203 19:49:37.931743 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-mvxxd"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.034356 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 03 19:49:38 crc kubenswrapper[4916]: W1203 19:49:38.042600 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d15f4cc_451e_4898_9125_a2ad4f229e3d.slice/crio-a06a0e3cb762004a595c95fee45c0c7b106948b82042f44cb0db525dd84e72bf WatchSource:0}: Error finding container a06a0e3cb762004a595c95fee45c0c7b106948b82042f44cb0db525dd84e72bf: Status 404 returned error can't find the container with id a06a0e3cb762004a595c95fee45c0c7b106948b82042f44cb0db525dd84e72bf
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.718270 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7c48bb485f-tqvlz"]
Dec 03 19:49:38 crc kubenswrapper[4916]: E1203 19:49:38.719011 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2377ccfa-eef8-4809-993d-28cf0320206a" containerName="placement-db-sync"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.719027 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="2377ccfa-eef8-4809-993d-28cf0320206a" containerName="placement-db-sync"
Dec 03 19:49:38 crc kubenswrapper[4916]: E1203 19:49:38.719058 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64ae3277-4d93-4a36-ba5a-9913bb3e58d7" containerName="heat-db-sync"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.719066 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="64ae3277-4d93-4a36-ba5a-9913bb3e58d7" containerName="heat-db-sync"
Dec 03 19:49:38 crc kubenswrapper[4916]: E1203 19:49:38.719083 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a166793-92a9-4ad3-95cb-1743fe20d361" containerName="keystone-bootstrap"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.719092 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a166793-92a9-4ad3-95cb-1743fe20d361" containerName="keystone-bootstrap"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.719307 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="2377ccfa-eef8-4809-993d-28cf0320206a" containerName="placement-db-sync"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.719335 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="64ae3277-4d93-4a36-ba5a-9913bb3e58d7" containerName="heat-db-sync"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.719353 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a166793-92a9-4ad3-95cb-1743fe20d361" containerName="keystone-bootstrap"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.720041 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7c48bb485f-tqvlz"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.723335 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.723455 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.723621 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.723826 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.723978 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-hvlw8"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.724507 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.745721 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7c48bb485f-tqvlz"]
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.765907 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5747d5b464-dtdts"]
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.767707 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5747d5b464-dtdts"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.774399 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.774668 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-2dvnk"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.774787 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.774899 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.775146 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.788726 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6858994-e73c-4542-9cb1-5bb0213f35bf-logs\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.790037 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-public-tls-certs\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.790137 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-config-data\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz"
pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.790204 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-combined-ca-bundle\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.790304 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qwm6\" (UniqueName: \"kubernetes.io/projected/b6858994-e73c-4542-9cb1-5bb0213f35bf-kube-api-access-6qwm6\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.790459 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-fernet-keys\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.790644 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-config-data\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.790719 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-scripts\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.790778 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-credential-keys\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.790855 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-scripts\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.790933 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-internal-tls-certs\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.791010 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-public-tls-certs\") pod \"placement-5747d5b464-dtdts\" (UID: 
\"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.791083 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4xw8\" (UniqueName: \"kubernetes.io/projected/288ed5cf-795f-44fd-8ae8-ba522e48a62e-kube-api-access-f4xw8\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.791159 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-combined-ca-bundle\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.791234 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-internal-tls-certs\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.792732 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5747d5b464-dtdts"] Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.894173 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-fernet-keys\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.894460 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-config-data\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.894561 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-scripts\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.894699 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-credential-keys\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.894795 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-scripts\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.894892 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-internal-tls-certs\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.895001 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-public-tls-certs\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.895107 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4xw8\" (UniqueName: \"kubernetes.io/projected/288ed5cf-795f-44fd-8ae8-ba522e48a62e-kube-api-access-f4xw8\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.895205 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-combined-ca-bundle\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.895455 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-internal-tls-certs\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.895591 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6858994-e73c-4542-9cb1-5bb0213f35bf-logs\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.895890 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-public-tls-certs\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.896289 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-config-data\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.897348 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-combined-ca-bundle\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.897512 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qwm6\" (UniqueName: \"kubernetes.io/projected/b6858994-e73c-4542-9cb1-5bb0213f35bf-kube-api-access-6qwm6\") 
pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.898333 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6858994-e73c-4542-9cb1-5bb0213f35bf-logs\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.901878 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-internal-tls-certs\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.901984 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-scripts\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.904427 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-scripts\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.904839 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-fernet-keys\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.905613 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-combined-ca-bundle\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.908181 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-public-tls-certs\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.908282 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-internal-tls-certs\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.914200 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-credential-keys\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.915300 4916 
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.915406 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-config-data\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.920383 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6858994-e73c-4542-9cb1-5bb0213f35bf-config-data\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.925038 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/288ed5cf-795f-44fd-8ae8-ba522e48a62e-public-tls-certs\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.926944 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4xw8\" (UniqueName: \"kubernetes.io/projected/288ed5cf-795f-44fd-8ae8-ba522e48a62e-kube-api-access-f4xw8\") pod \"keystone-7c48bb485f-tqvlz\" (UID: \"288ed5cf-795f-44fd-8ae8-ba522e48a62e\") " pod="openstack/keystone-7c48bb485f-tqvlz"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.931493 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qwm6\" (UniqueName: \"kubernetes.io/projected/b6858994-e73c-4542-9cb1-5bb0213f35bf-kube-api-access-6qwm6\") pod \"placement-5747d5b464-dtdts\" (UID: \"b6858994-e73c-4542-9cb1-5bb0213f35bf\") " pod="openstack/placement-5747d5b464-dtdts"
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.957271 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d15f4cc-451e-4898-9125-a2ad4f229e3d","Type":"ContainerStarted","Data":"2c5d60658a726176a4518d7247e35b98c1e3353c2719e48484db0fe06770760d"}
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.957327 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d15f4cc-451e-4898-9125-a2ad4f229e3d","Type":"ContainerStarted","Data":"a06a0e3cb762004a595c95fee45c0c7b106948b82042f44cb0db525dd84e72bf"}
Dec 03 19:49:38 crc kubenswrapper[4916]: I1203 19:49:38.959818 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fcf233ee-1271-4a7a-9fcb-5a7725746aa2","Type":"ContainerStarted","Data":"069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6"}
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.036198 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7c48bb485f-tqvlz"
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.089103 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5747d5b464-dtdts"
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.538533 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7c48bb485f-tqvlz"]
Dec 03 19:49:39 crc kubenswrapper[4916]: W1203 19:49:39.551553 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod288ed5cf_795f_44fd_8ae8_ba522e48a62e.slice/crio-41a1d4a268ff8e632cca83d1ed0fe81add812af3356078b3df73377c4d85ac54 WatchSource:0}: Error finding container 41a1d4a268ff8e632cca83d1ed0fe81add812af3356078b3df73377c4d85ac54: Status 404 returned error can't find the container with id 41a1d4a268ff8e632cca83d1ed0fe81add812af3356078b3df73377c4d85ac54
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.628998 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5747d5b464-dtdts"]
Dec 03 19:49:39 crc kubenswrapper[4916]: W1203 19:49:39.645016 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6858994_e73c_4542_9cb1_5bb0213f35bf.slice/crio-d50c292e6220e231336e3c7cd56bad4362bf539abd57758401664501178a0980 WatchSource:0}: Error finding container d50c292e6220e231336e3c7cd56bad4362bf539abd57758401664501178a0980: Status 404 returned error can't find the container with id d50c292e6220e231336e3c7cd56bad4362bf539abd57758401664501178a0980
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.977361 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d15f4cc-451e-4898-9125-a2ad4f229e3d","Type":"ContainerStarted","Data":"52ac206e19a957b4b35869ab6f3dd919d94e86246741828c8bb387e27574fa3a"}
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.980970 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7c48bb485f-tqvlz" event={"ID":"288ed5cf-795f-44fd-8ae8-ba522e48a62e","Type":"ContainerStarted","Data":"2101743d544e9c5bcdb15137981cf7d3afa95576124621ebdef744e01061151e"}
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.981007 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7c48bb485f-tqvlz" event={"ID":"288ed5cf-795f-44fd-8ae8-ba522e48a62e","Type":"ContainerStarted","Data":"41a1d4a268ff8e632cca83d1ed0fe81add812af3356078b3df73377c4d85ac54"}
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.981464 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7c48bb485f-tqvlz"
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.982986 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5747d5b464-dtdts" event={"ID":"b6858994-e73c-4542-9cb1-5bb0213f35bf","Type":"ContainerStarted","Data":"c3f8163eabdfa51dd8c713538614fa5d1ae5e02b688b9c7c517da5a9e7414338"}
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.983020 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5747d5b464-dtdts" event={"ID":"b6858994-e73c-4542-9cb1-5bb0213f35bf","Type":"ContainerStarted","Data":"d50c292e6220e231336e3c7cd56bad4362bf539abd57758401664501178a0980"}
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.986704 4916 generic.go:334] "Generic (PLEG): container finished" podID="d3c82cbe-38cf-4d4c-b18c-9b296776cb5c" containerID="84f1626ea5cf3e3cbd1d94f33ef8c72b5c7d1c136b59a64cdf8764c511f3ea9d" exitCode=0
Dec 03 19:49:39 crc kubenswrapper[4916]: I1203 19:49:39.986736 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-lhffz" event={"ID":"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c","Type":"ContainerDied","Data":"84f1626ea5cf3e3cbd1d94f33ef8c72b5c7d1c136b59a64cdf8764c511f3ea9d"}
"SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-lhffz" event={"ID":"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c","Type":"ContainerDied","Data":"84f1626ea5cf3e3cbd1d94f33ef8c72b5c7d1c136b59a64cdf8764c511f3ea9d"} Dec 03 19:49:40 crc kubenswrapper[4916]: I1203 19:49:40.002734 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.002715648 podStartE2EDuration="8.002715648s" podCreationTimestamp="2025-12-03 19:49:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:49:39.996211315 +0000 UTC m=+1195.959021591" watchObservedRunningTime="2025-12-03 19:49:40.002715648 +0000 UTC m=+1195.965525914" Dec 03 19:49:40 crc kubenswrapper[4916]: I1203 19:49:40.040824 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7c48bb485f-tqvlz" podStartSLOduration=2.040808374 podStartE2EDuration="2.040808374s" podCreationTimestamp="2025-12-03 19:49:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:49:40.036711954 +0000 UTC m=+1195.999522210" watchObservedRunningTime="2025-12-03 19:49:40.040808374 +0000 UTC m=+1196.003618640" Dec 03 19:49:40 crc kubenswrapper[4916]: I1203 19:49:40.167880 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 19:49:40 crc kubenswrapper[4916]: I1203 19:49:40.167929 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 19:49:40 crc kubenswrapper[4916]: I1203 19:49:40.199251 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 19:49:40 crc kubenswrapper[4916]: I1203 19:49:40.210268 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.007967 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5747d5b464-dtdts" event={"ID":"b6858994-e73c-4542-9cb1-5bb0213f35bf","Type":"ContainerStarted","Data":"c9bf3311b75417ddf56f65294fc2c8d2c211b572d2a65e71658e1572334b2a0e"} Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.009357 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.009375 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.042736 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5747d5b464-dtdts" podStartSLOduration=3.042711168 podStartE2EDuration="3.042711168s" podCreationTimestamp="2025-12-03 19:49:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:49:41.041396363 +0000 UTC m=+1197.004206629" watchObservedRunningTime="2025-12-03 19:49:41.042711168 +0000 UTC m=+1197.005521444" Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.350090 4916 util.go:48] "No ready sandbox for pod can be found. 
Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.464364 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-config\") pod \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") "
Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.464516 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-combined-ca-bundle\") pod \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") "
Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.464636 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47bpf\" (UniqueName: \"kubernetes.io/projected/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-kube-api-access-47bpf\") pod \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\" (UID: \"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c\") "
Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.469087 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-kube-api-access-47bpf" (OuterVolumeSpecName: "kube-api-access-47bpf") pod "d3c82cbe-38cf-4d4c-b18c-9b296776cb5c" (UID: "d3c82cbe-38cf-4d4c-b18c-9b296776cb5c"). InnerVolumeSpecName "kube-api-access-47bpf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.490506 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-config" (OuterVolumeSpecName: "config") pod "d3c82cbe-38cf-4d4c-b18c-9b296776cb5c" (UID: "d3c82cbe-38cf-4d4c-b18c-9b296776cb5c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.500492 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3c82cbe-38cf-4d4c-b18c-9b296776cb5c" (UID: "d3c82cbe-38cf-4d4c-b18c-9b296776cb5c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.567255 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.567282 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47bpf\" (UniqueName: \"kubernetes.io/projected/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-kube-api-access-47bpf\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:41 crc kubenswrapper[4916]: I1203 19:49:41.567294 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.017833 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-lhffz" event={"ID":"d3c82cbe-38cf-4d4c-b18c-9b296776cb5c","Type":"ContainerDied","Data":"b826497f95faa5888f6e2656fc2800e171d6bf716e599dcdfe29bc9834138ee9"} Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.018136 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b826497f95faa5888f6e2656fc2800e171d6bf716e599dcdfe29bc9834138ee9" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.018035 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-lhffz" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.019869 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.019905 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.275681 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-px5fk"] Dec 03 19:49:42 crc kubenswrapper[4916]: E1203 19:49:42.276063 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c82cbe-38cf-4d4c-b18c-9b296776cb5c" containerName="neutron-db-sync" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.276076 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c82cbe-38cf-4d4c-b18c-9b296776cb5c" containerName="neutron-db-sync" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.276281 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3c82cbe-38cf-4d4c-b18c-9b296776cb5c" containerName="neutron-db-sync" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.277147 4916 util.go:30] "No sandbox for pod can be found. 
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.297698 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-px5fk"]
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.379712 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-config\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.380094 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-svc\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.380195 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.380330 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzksq\" (UniqueName: \"kubernetes.io/projected/cb050870-526a-4489-a369-88fe8f92a432-kube-api-access-pzksq\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.380433 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.380595 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.391788 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6c6cb986d4-9jlqh"]
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.393351 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6c6cb986d4-9jlqh"
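Unlike the keystone and placement pods, whose configuration all comes from Secrets, the dnsmasq pod above mounts plain ConfigMap volumes (config, dns-svc, ovsdbserver-nb/sb, dns-swift-storage-0), as the kubernetes.io/configmap UniqueNames show. A sketch of one such volume declaration; the ConfigMap object name is an assumption inferred from the volume name, not confirmed by the log.

    package main

    import (
        "fmt"

        corev1 "k8s.io/api/core/v1"
    )

    func main() {
        // ConfigMap-backed volume, mounted read-only into the pod by the
        // kubernetes.io/configmap plugin seen in the records above.
        vol := corev1.Volume{
            Name: "dns-svc",
            VolumeSource: corev1.VolumeSource{
                ConfigMap: &corev1.ConfigMapVolumeSource{
                    LocalObjectReference: corev1.LocalObjectReference{Name: "dns-svc"}, // assumed name
                },
            },
        }
        fmt.Println(vol.Name)
    }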
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.401113 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.401305 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.401414 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.401810 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-h4fp6"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.437017 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6c6cb986d4-9jlqh"]
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.482019 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.482115 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-config\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.482139 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-svc\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.482156 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.482194 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzksq\" (UniqueName: \"kubernetes.io/projected/cb050870-526a-4489-a369-88fe8f92a432-kube-api-access-pzksq\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.482216 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.483111 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
\"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.483241 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.483503 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.483833 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-config\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.484038 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-svc\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.505503 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzksq\" (UniqueName: \"kubernetes.io/projected/cb050870-526a-4489-a369-88fe8f92a432-kube-api-access-pzksq\") pod \"dnsmasq-dns-55f844cf75-px5fk\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " pod="openstack/dnsmasq-dns-55f844cf75-px5fk" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.584022 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-httpd-config\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.584106 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-ovndb-tls-certs\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.584124 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7xxt\" (UniqueName: \"kubernetes.io/projected/cbf2db2c-002e-431b-9238-071ac2e81d4d-kube-api-access-f7xxt\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.584143 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-config\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " 
pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.584236 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-combined-ca-bundle\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.600948 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-px5fk" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.686238 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-ovndb-tls-certs\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.686283 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7xxt\" (UniqueName: \"kubernetes.io/projected/cbf2db2c-002e-431b-9238-071ac2e81d4d-kube-api-access-f7xxt\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.686304 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-config\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.686385 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-combined-ca-bundle\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.686412 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-httpd-config\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.690639 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-httpd-config\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.690742 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-config\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.691308 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-ovndb-tls-certs\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: 
\"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.694318 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-combined-ca-bundle\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.703470 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7xxt\" (UniqueName: \"kubernetes.io/projected/cbf2db2c-002e-431b-9238-071ac2e81d4d-kube-api-access-f7xxt\") pod \"neutron-6c6cb986d4-9jlqh\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") " pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.748282 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.953591 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 19:49:42 crc kubenswrapper[4916]: I1203 19:49:42.955812 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 19:49:43 crc kubenswrapper[4916]: I1203 19:49:43.288993 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 19:49:43 crc kubenswrapper[4916]: I1203 19:49:43.289350 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 19:49:43 crc kubenswrapper[4916]: I1203 19:49:43.334330 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 19:49:43 crc kubenswrapper[4916]: I1203 19:49:43.338713 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.060448 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.060823 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.312383 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6b599f5db5-cs2bs"] Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.314201 4916 util.go:30] "No sandbox for pod can be found. 
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.315732 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.315883 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.331044 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b599f5db5-cs2bs"]
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.418114 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-httpd-config\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.418161 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pg9r\" (UniqueName: \"kubernetes.io/projected/c924271a-a9cb-45cd-b1ab-3631a27c81aa-kube-api-access-6pg9r\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.418224 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-combined-ca-bundle\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.418249 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-ovndb-tls-certs\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.418299 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-public-tls-certs\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.418326 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-internal-tls-certs\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.418342 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-config\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.520389 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-httpd-config\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.520436 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pg9r\" (UniqueName: \"kubernetes.io/projected/c924271a-a9cb-45cd-b1ab-3631a27c81aa-kube-api-access-6pg9r\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.520548 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-combined-ca-bundle\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.520617 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-ovndb-tls-certs\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.520689 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-public-tls-certs\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.520734 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-internal-tls-certs\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.520761 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-config\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.523277 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.523825 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.527048 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-ovndb-tls-certs\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.527897 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-combined-ca-bundle\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.532476 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-config\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.536666 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-internal-tls-certs\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.538830 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-httpd-config\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.539367 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pg9r\" (UniqueName: \"kubernetes.io/projected/c924271a-a9cb-45cd-b1ab-3631a27c81aa-kube-api-access-6pg9r\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.542642 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c924271a-a9cb-45cd-b1ab-3631a27c81aa-public-tls-certs\") pod \"neutron-6b599f5db5-cs2bs\" (UID: \"c924271a-a9cb-45cd-b1ab-3631a27c81aa\") " pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:44 crc kubenswrapper[4916]: I1203 19:49:44.680455 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:46 crc kubenswrapper[4916]: I1203 19:49:46.129796 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 03 19:49:46 crc kubenswrapper[4916]: I1203 19:49:46.130170 4916 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 03 19:49:46 crc kubenswrapper[4916]: I1203 19:49:46.131625 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Dec 03 19:49:49 crc kubenswrapper[4916]: E1203 19:49:49.178841 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2"
Dec 03 19:49:49 crc kubenswrapper[4916]: I1203 19:49:49.430002 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6b599f5db5-cs2bs"]
Dec 03 19:49:49 crc kubenswrapper[4916]: W1203 19:49:49.434254 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc924271a_a9cb_45cd_b1ab_3631a27c81aa.slice/crio-a6ca874613c87464960867fd1c753861868121802ee0baa9de9aeee04440ac8e WatchSource:0}: Error finding container a6ca874613c87464960867fd1c753861868121802ee0baa9de9aeee04440ac8e: Status 404 returned error can't find the container with id a6ca874613c87464960867fd1c753861868121802ee0baa9de9aeee04440ac8e
Dec 03 19:49:49 crc kubenswrapper[4916]: I1203 19:49:49.508430 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-px5fk"]
Dec 03 19:49:49 crc kubenswrapper[4916]: I1203 19:49:49.588049 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6c6cb986d4-9jlqh"]
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.116875 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c6cb986d4-9jlqh" event={"ID":"cbf2db2c-002e-431b-9238-071ac2e81d4d","Type":"ContainerStarted","Data":"72e45ef09758b2d244eb02d9ea27fedb602fc0ad50dd520de1d82f9df2d2d07e"}
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.117369 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c6cb986d4-9jlqh" event={"ID":"cbf2db2c-002e-431b-9238-071ac2e81d4d","Type":"ContainerStarted","Data":"059635875b6f80999bff6128bb8ddd6ed4c86828b42c5b990f68604d5ac362c6"}
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.120486 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fcf233ee-1271-4a7a-9fcb-5a7725746aa2","Type":"ContainerStarted","Data":"dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3"}
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.120612 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="ceilometer-notification-agent" containerID="cri-o://94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7" gracePeriod=30
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.120638 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.120672 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="proxy-httpd" containerID="cri-o://dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3" gracePeriod=30
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.120687 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="sg-core" containerID="cri-o://069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6" gracePeriod=30
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.125227 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-q6cjh" event={"ID":"0d5347c1-1439-4284-977d-390912ffe9a5","Type":"ContainerStarted","Data":"4ce90c5a72df412f20c1ccc631511f59485ef147810e3d59b97ebcb6a821ce59"}
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.127457 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-l4sh2" event={"ID":"7eac938e-d147-4214-a0b1-4a17ac69b649","Type":"ContainerStarted","Data":"90b20d3334567229aba4e98e42071ef308818e8ba18253da6b66ae22e934cf35"}
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.128940 4916 generic.go:334] "Generic (PLEG): container finished" podID="cb050870-526a-4489-a369-88fe8f92a432" containerID="f2e5261c15edbf101d85b1cb1f8d12bc77e4b989a782c6c18ef4266ee705400b" exitCode=0
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.129046 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-px5fk" event={"ID":"cb050870-526a-4489-a369-88fe8f92a432","Type":"ContainerDied","Data":"f2e5261c15edbf101d85b1cb1f8d12bc77e4b989a782c6c18ef4266ee705400b"}
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.129068 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-px5fk" event={"ID":"cb050870-526a-4489-a369-88fe8f92a432","Type":"ContainerStarted","Data":"d68975feb43fce356f646a3eac5c91123e6e7e2e762084167aafc2da4f9687b1"}
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.133857 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b599f5db5-cs2bs" event={"ID":"c924271a-a9cb-45cd-b1ab-3631a27c81aa","Type":"ContainerStarted","Data":"768fe24d21cd4b08aadc6350d4a5d256325c7e5dd511fd5e201e213033d5c807"}
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.133902 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b599f5db5-cs2bs" event={"ID":"c924271a-a9cb-45cd-b1ab-3631a27c81aa","Type":"ContainerStarted","Data":"7dc6d08bb95af3a26843e322fc32e4f15c74c80b3523873eb523c0d627132125"}
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.133931 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6b599f5db5-cs2bs" event={"ID":"c924271a-a9cb-45cd-b1ab-3631a27c81aa","Type":"ContainerStarted","Data":"a6ca874613c87464960867fd1c753861868121802ee0baa9de9aeee04440ac8e"}
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.134016 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.207095 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6b599f5db5-cs2bs" podStartSLOduration=6.207075751 podStartE2EDuration="6.207075751s" podCreationTimestamp="2025-12-03 19:49:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:49:50.166272013 +0000 UTC m=+1206.129082279" watchObservedRunningTime="2025-12-03 19:49:50.207075751 +0000 UTC m=+1206.169886017"
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.224238 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-l4sh2" podStartSLOduration=3.36684832 podStartE2EDuration="56.224221388s" podCreationTimestamp="2025-12-03 19:48:54 +0000 UTC" firstStartedPulling="2025-12-03 19:48:56.10245164 +0000 UTC m=+1152.065261906" lastFinishedPulling="2025-12-03 19:49:48.959824668 +0000 UTC m=+1204.922634974" observedRunningTime="2025-12-03 19:49:50.204074561 +0000 UTC m=+1206.166884827" watchObservedRunningTime="2025-12-03 19:49:50.224221388 +0000 UTC m=+1206.187031644"
Dec 03 19:49:50 crc kubenswrapper[4916]: I1203 19:49:50.233177 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-q6cjh" podStartSLOduration=3.6215042410000002 podStartE2EDuration="55.233161556s" podCreationTimestamp="2025-12-03 19:48:55 +0000 UTC" firstStartedPulling="2025-12-03 19:48:57.346674713 +0000 UTC m=+1153.309484979" lastFinishedPulling="2025-12-03 19:49:48.958332018 +0000 UTC m=+1204.921142294" observedRunningTime="2025-12-03 19:49:50.215107685 +0000 UTC m=+1206.177917951" watchObservedRunningTime="2025-12-03 19:49:50.233161556 +0000 UTC m=+1206.195971822"
Dec 03 19:49:51 crc kubenswrapper[4916]: I1203 19:49:51.147242 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c6cb986d4-9jlqh" event={"ID":"cbf2db2c-002e-431b-9238-071ac2e81d4d","Type":"ContainerStarted","Data":"422309a2a61c801173de90807be0619c19db943ae3e5888ffa1bf023b2730ad6"}
Dec 03 19:49:51 crc kubenswrapper[4916]: I1203 19:49:51.147811 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6c6cb986d4-9jlqh"
Dec 03 19:49:51 crc kubenswrapper[4916]: I1203 19:49:51.150487 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-px5fk" event={"ID":"cb050870-526a-4489-a369-88fe8f92a432","Type":"ContainerStarted","Data":"5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168"}
Dec 03 19:49:51 crc kubenswrapper[4916]: I1203 19:49:51.151146 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55f844cf75-px5fk"
Dec 03 19:49:51 crc kubenswrapper[4916]: I1203 19:49:51.154208 4916 generic.go:334] "Generic (PLEG): container finished" podID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerID="dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3" exitCode=0
Dec 03 19:49:51 crc kubenswrapper[4916]: I1203 19:49:51.154282 4916 generic.go:334] "Generic (PLEG): container finished" podID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerID="069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6" exitCode=2
Dec 03 19:49:51 crc kubenswrapper[4916]: I1203 19:49:51.154343 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fcf233ee-1271-4a7a-9fcb-5a7725746aa2","Type":"ContainerDied","Data":"dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3"}
Dec 03 19:49:51 crc kubenswrapper[4916]: I1203 19:49:51.154380 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fcf233ee-1271-4a7a-9fcb-5a7725746aa2","Type":"ContainerDied","Data":"069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6"}
Dec 03 19:49:51 crc kubenswrapper[4916]: I1203 19:49:51.190279 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6c6cb986d4-9jlqh" podStartSLOduration=9.190256756 podStartE2EDuration="9.190256756s" podCreationTimestamp="2025-12-03 19:49:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:49:51.178097182 +0000 UTC m=+1207.140907458" watchObservedRunningTime="2025-12-03 19:49:51.190256756 +0000 UTC m=+1207.153067032"
Dec 03 19:49:51 crc kubenswrapper[4916]: I1203 19:49:51.218679 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f844cf75-px5fk" podStartSLOduration=9.218644813 podStartE2EDuration="9.218644813s" podCreationTimestamp="2025-12-03 19:49:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:49:51.215119279 +0000 UTC m=+1207.177929545" watchObservedRunningTime="2025-12-03 19:49:51.218644813 +0000 UTC m=+1207.181455079"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.090207 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.185381 4916 generic.go:334] "Generic (PLEG): container finished" podID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerID="94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7" exitCode=0
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.185488 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fcf233ee-1271-4a7a-9fcb-5a7725746aa2","Type":"ContainerDied","Data":"94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7"}
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.185552 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fcf233ee-1271-4a7a-9fcb-5a7725746aa2","Type":"ContainerDied","Data":"6b19e06dd45a6f0570d7965b5102b4f8cd8b3ef8c2fe6db9d2e8aafe669562f0"}
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.185614 4916 scope.go:117] "RemoveContainer" containerID="dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.185780 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.188015 4916 generic.go:334] "Generic (PLEG): container finished" podID="0d5347c1-1439-4284-977d-390912ffe9a5" containerID="4ce90c5a72df412f20c1ccc631511f59485ef147810e3d59b97ebcb6a821ce59" exitCode=0
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.189261 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-q6cjh" event={"ID":"0d5347c1-1439-4284-977d-390912ffe9a5","Type":"ContainerDied","Data":"4ce90c5a72df412f20c1ccc631511f59485ef147810e3d59b97ebcb6a821ce59"}
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.194341 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-scripts\") pod \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") "
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.194407 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-log-httpd\") pod \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") "
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.194601 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9nmj\" (UniqueName: \"kubernetes.io/projected/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-kube-api-access-v9nmj\") pod \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") "
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.194636 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-run-httpd\") pod \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") "
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.194683 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-config-data\") pod \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") "
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.194749 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-combined-ca-bundle\") pod \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") "
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.194784 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-sg-core-conf-yaml\") pod \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\" (UID: \"fcf233ee-1271-4a7a-9fcb-5a7725746aa2\") "
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.195202 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fcf233ee-1271-4a7a-9fcb-5a7725746aa2" (UID: "fcf233ee-1271-4a7a-9fcb-5a7725746aa2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.195231 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fcf233ee-1271-4a7a-9fcb-5a7725746aa2" (UID: "fcf233ee-1271-4a7a-9fcb-5a7725746aa2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.200297 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-scripts" (OuterVolumeSpecName: "scripts") pod "fcf233ee-1271-4a7a-9fcb-5a7725746aa2" (UID: "fcf233ee-1271-4a7a-9fcb-5a7725746aa2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.208896 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-kube-api-access-v9nmj" (OuterVolumeSpecName: "kube-api-access-v9nmj") pod "fcf233ee-1271-4a7a-9fcb-5a7725746aa2" (UID: "fcf233ee-1271-4a7a-9fcb-5a7725746aa2"). InnerVolumeSpecName "kube-api-access-v9nmj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.248113 4916 scope.go:117] "RemoveContainer" containerID="069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.276117 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fcf233ee-1271-4a7a-9fcb-5a7725746aa2" (UID: "fcf233ee-1271-4a7a-9fcb-5a7725746aa2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.297624 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.297926 4916 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.297989 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9nmj\" (UniqueName: \"kubernetes.io/projected/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-kube-api-access-v9nmj\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.298043 4916 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.298093 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.313704 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "fcf233ee-1271-4a7a-9fcb-5a7725746aa2" (UID: "fcf233ee-1271-4a7a-9fcb-5a7725746aa2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.326425 4916 scope.go:117] "RemoveContainer" containerID="94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.329653 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-config-data" (OuterVolumeSpecName: "config-data") pod "fcf233ee-1271-4a7a-9fcb-5a7725746aa2" (UID: "fcf233ee-1271-4a7a-9fcb-5a7725746aa2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.369903 4916 scope.go:117] "RemoveContainer" containerID="dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3" Dec 03 19:49:53 crc kubenswrapper[4916]: E1203 19:49:53.373662 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3\": container with ID starting with dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3 not found: ID does not exist" containerID="dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.373701 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3"} err="failed to get container status \"dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3\": rpc error: code = NotFound desc = could not find container \"dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3\": container with ID starting with dd8863a8b2987494bbd3b606ff5b79dea2636dbb903f4dc2ffd212988ff027f3 not found: ID does not exist" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.373726 4916 scope.go:117] "RemoveContainer" containerID="069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6" Dec 03 19:49:53 crc kubenswrapper[4916]: E1203 19:49:53.373997 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6\": container with ID starting with 069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6 not found: ID does not exist" containerID="069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.374016 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6"} err="failed to get container status \"069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6\": rpc error: code = NotFound desc = could not find container \"069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6\": container with ID starting with 069332031f040c9a34907f3fcd812ca78c76a20305cc137fe66eb81595df4dd6 not found: ID does not exist" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.374029 4916 scope.go:117] "RemoveContainer" containerID="94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7" Dec 03 19:49:53 crc kubenswrapper[4916]: E1203 19:49:53.377664 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7\": container with ID starting with 94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7 not found: ID does not exist" containerID="94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.377699 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7"} err="failed to get container status \"94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7\": rpc error: code = NotFound desc = could not 
find container \"94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7\": container with ID starting with 94fd8ec499be79ade0cd5bef5800635246219e9a451d5e6016d612c686bf06b7 not found: ID does not exist" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.399811 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.399853 4916 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fcf233ee-1271-4a7a-9fcb-5a7725746aa2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.597345 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.619298 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.620933 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:49:53 crc kubenswrapper[4916]: E1203 19:49:53.621767 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="proxy-httpd" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.621799 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="proxy-httpd" Dec 03 19:49:53 crc kubenswrapper[4916]: E1203 19:49:53.621834 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="sg-core" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.621847 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="sg-core" Dec 03 19:49:53 crc kubenswrapper[4916]: E1203 19:49:53.621877 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="ceilometer-notification-agent" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.621889 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="ceilometer-notification-agent" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.622123 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="sg-core" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.622152 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="proxy-httpd" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.622172 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" containerName="ceilometer-notification-agent" Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.624960 4916 util.go:30] "No sandbox for pod can be found. 
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.627514 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.627826 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.630772 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.703301 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-run-httpd\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.703357 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-log-httpd\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.703458 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wttlq\" (UniqueName: \"kubernetes.io/projected/f39cbb12-e84d-4f98-a410-86f3103ea424-kube-api-access-wttlq\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.703696 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.703798 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.703905 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-scripts\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.703995 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-config-data\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.805736 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.805795 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-scripts\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.805830 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-config-data\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.805887 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-run-httpd\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.805904 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-log-httpd\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.805937 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wttlq\" (UniqueName: \"kubernetes.io/projected/f39cbb12-e84d-4f98-a410-86f3103ea424-kube-api-access-wttlq\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.805984 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.806379 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-run-httpd\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.806538 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-log-httpd\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.810159 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.810646 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.810957 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-config-data\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.810966 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-scripts\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:53 crc kubenswrapper[4916]: I1203 19:49:53.826255 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wttlq\" (UniqueName: \"kubernetes.io/projected/f39cbb12-e84d-4f98-a410-86f3103ea424-kube-api-access-wttlq\") pod \"ceilometer-0\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " pod="openstack/ceilometer-0"
Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.005820 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.459499 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-q6cjh"
Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.501278 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcf233ee-1271-4a7a-9fcb-5a7725746aa2" path="/var/lib/kubelet/pods/fcf233ee-1271-4a7a-9fcb-5a7725746aa2/volumes"
Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.509526 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.516151 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skwrb\" (UniqueName: \"kubernetes.io/projected/0d5347c1-1439-4284-977d-390912ffe9a5-kube-api-access-skwrb\") pod \"0d5347c1-1439-4284-977d-390912ffe9a5\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") "
Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.516243 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-db-sync-config-data\") pod \"0d5347c1-1439-4284-977d-390912ffe9a5\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") "
Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.516444 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-combined-ca-bundle\") pod \"0d5347c1-1439-4284-977d-390912ffe9a5\" (UID: \"0d5347c1-1439-4284-977d-390912ffe9a5\") "
Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.524778 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d5347c1-1439-4284-977d-390912ffe9a5-kube-api-access-skwrb" (OuterVolumeSpecName: "kube-api-access-skwrb") pod "0d5347c1-1439-4284-977d-390912ffe9a5" (UID: "0d5347c1-1439-4284-977d-390912ffe9a5"). InnerVolumeSpecName "kube-api-access-skwrb". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.528999 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "0d5347c1-1439-4284-977d-390912ffe9a5" (UID: "0d5347c1-1439-4284-977d-390912ffe9a5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.551796 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d5347c1-1439-4284-977d-390912ffe9a5" (UID: "0d5347c1-1439-4284-977d-390912ffe9a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.618034 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.618078 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skwrb\" (UniqueName: \"kubernetes.io/projected/0d5347c1-1439-4284-977d-390912ffe9a5-kube-api-access-skwrb\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:54 crc kubenswrapper[4916]: I1203 19:49:54.618092 4916 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0d5347c1-1439-4284-977d-390912ffe9a5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.212498 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39cbb12-e84d-4f98-a410-86f3103ea424","Type":"ContainerStarted","Data":"0137f0c39daa5d8f9102b706b2b8fc14b43af74d9196506c98d3b5a8c8074037"} Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.212794 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39cbb12-e84d-4f98-a410-86f3103ea424","Type":"ContainerStarted","Data":"6dd77328be1b555b7015b56065f860f7441b94b8f4e34b27f03f7159c6e0d531"} Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.214496 4916 generic.go:334] "Generic (PLEG): container finished" podID="7eac938e-d147-4214-a0b1-4a17ac69b649" containerID="90b20d3334567229aba4e98e42071ef308818e8ba18253da6b66ae22e934cf35" exitCode=0 Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.214544 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-l4sh2" event={"ID":"7eac938e-d147-4214-a0b1-4a17ac69b649","Type":"ContainerDied","Data":"90b20d3334567229aba4e98e42071ef308818e8ba18253da6b66ae22e934cf35"} Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.217746 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-q6cjh" event={"ID":"0d5347c1-1439-4284-977d-390912ffe9a5","Type":"ContainerDied","Data":"00c1854f62560940e96e27e1043cd769c0e5a04dee7e53b78ede988f64f46b96"} Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.217767 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00c1854f62560940e96e27e1043cd769c0e5a04dee7e53b78ede988f64f46b96" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.217806 4916 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-q6cjh" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.415776 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6c644b9d95-mhwlb"] Dec 03 19:49:55 crc kubenswrapper[4916]: E1203 19:49:55.416169 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d5347c1-1439-4284-977d-390912ffe9a5" containerName="barbican-db-sync" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.416185 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d5347c1-1439-4284-977d-390912ffe9a5" containerName="barbican-db-sync" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.416354 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d5347c1-1439-4284-977d-390912ffe9a5" containerName="barbican-db-sync" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.417252 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.418171 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7755d7d784-5sk27"] Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.419545 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.420202 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.420402 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-gn5p6" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.420685 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.427805 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.433019 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6c644b9d95-mhwlb"] Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.437487 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33d04f2-ecc8-4c07-b258-60918f9aff05-config-data\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.437831 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33d04f2-ecc8-4c07-b258-60918f9aff05-combined-ca-bundle\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.437862 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b33d04f2-ecc8-4c07-b258-60918f9aff05-config-data-custom\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 
19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.437880 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b33d04f2-ecc8-4c07-b258-60918f9aff05-logs\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.437928 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdb9j\" (UniqueName: \"kubernetes.io/projected/b33d04f2-ecc8-4c07-b258-60918f9aff05-kube-api-access-fdb9j\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.455523 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7755d7d784-5sk27"] Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.530759 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-px5fk"] Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.530982 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f844cf75-px5fk" podUID="cb050870-526a-4489-a369-88fe8f92a432" containerName="dnsmasq-dns" containerID="cri-o://5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168" gracePeriod=10 Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.539746 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdb9j\" (UniqueName: \"kubernetes.io/projected/b33d04f2-ecc8-4c07-b258-60918f9aff05-kube-api-access-fdb9j\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.539805 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-config-data-custom\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.539858 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbg89\" (UniqueName: \"kubernetes.io/projected/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-kube-api-access-cbg89\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.539886 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-config-data\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.539915 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-logs\") pod 
\"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.539936 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-combined-ca-bundle\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.539957 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33d04f2-ecc8-4c07-b258-60918f9aff05-config-data\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.539977 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33d04f2-ecc8-4c07-b258-60918f9aff05-combined-ca-bundle\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.540001 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b33d04f2-ecc8-4c07-b258-60918f9aff05-config-data-custom\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.540016 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b33d04f2-ecc8-4c07-b258-60918f9aff05-logs\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.540415 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b33d04f2-ecc8-4c07-b258-60918f9aff05-logs\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.542697 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f844cf75-px5fk" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.550243 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b33d04f2-ecc8-4c07-b258-60918f9aff05-config-data-custom\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.551800 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33d04f2-ecc8-4c07-b258-60918f9aff05-config-data\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 
19:49:55.570968 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33d04f2-ecc8-4c07-b258-60918f9aff05-combined-ca-bundle\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.578122 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdb9j\" (UniqueName: \"kubernetes.io/projected/b33d04f2-ecc8-4c07-b258-60918f9aff05-kube-api-access-fdb9j\") pod \"barbican-worker-6c644b9d95-mhwlb\" (UID: \"b33d04f2-ecc8-4c07-b258-60918f9aff05\") " pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.628375 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-dl8tp"] Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.643345 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-config-data-custom\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.643414 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbg89\" (UniqueName: \"kubernetes.io/projected/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-kube-api-access-cbg89\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.643441 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-config-data\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.643475 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-logs\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.643501 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-combined-ca-bundle\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.646655 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-logs\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.647666 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-combined-ca-bundle\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.664136 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-config-data\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.667402 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-config-data-custom\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.689938 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.694456 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbg89\" (UniqueName: \"kubernetes.io/projected/c3d699e4-f5e8-4719-bc16-b5a85bcaa695-kube-api-access-cbg89\") pod \"barbican-keystone-listener-7755d7d784-5sk27\" (UID: \"c3d699e4-f5e8-4719-bc16-b5a85bcaa695\") " pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.701724 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-dl8tp"] Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.734748 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5d9557c554-w6j5d"] Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.737030 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.738774 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.745348 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.745400 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.745424 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.745501 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-svc\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.745587 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0252d76f-bb1d-4341-a584-660f1ecd343a-logs\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.745605 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4vnc\" (UniqueName: \"kubernetes.io/projected/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-kube-api-access-b4vnc\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.745645 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data-custom\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.745714 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzxc5\" (UniqueName: \"kubernetes.io/projected/0252d76f-bb1d-4341-a584-660f1ecd343a-kube-api-access-qzxc5\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc 
kubenswrapper[4916]: I1203 19:49:55.745735 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-combined-ca-bundle\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.745793 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-config\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.745826 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.748903 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5d9557c554-w6j5d"] Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.765504 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6c644b9d95-mhwlb" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.785305 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.848510 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.849067 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.849115 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.849174 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.849193 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-svc\") pod 
\"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.849299 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0252d76f-bb1d-4341-a584-660f1ecd343a-logs\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.849318 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4vnc\" (UniqueName: \"kubernetes.io/projected/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-kube-api-access-b4vnc\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.849392 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data-custom\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.849438 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzxc5\" (UniqueName: \"kubernetes.io/projected/0252d76f-bb1d-4341-a584-660f1ecd343a-kube-api-access-qzxc5\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.849455 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-combined-ca-bundle\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.849497 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-config\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.850457 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-config\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.852142 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.853154 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: 
\"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.853154 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.855055 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-svc\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.856758 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.853457 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0252d76f-bb1d-4341-a584-660f1ecd343a-logs\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.862498 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data-custom\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.862677 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-combined-ca-bundle\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.866695 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4vnc\" (UniqueName: \"kubernetes.io/projected/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-kube-api-access-b4vnc\") pod \"dnsmasq-dns-85ff748b95-dl8tp\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.867771 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzxc5\" (UniqueName: \"kubernetes.io/projected/0252d76f-bb1d-4341-a584-660f1ecd343a-kube-api-access-qzxc5\") pod \"barbican-api-5d9557c554-w6j5d\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") " pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.919008 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:55 crc kubenswrapper[4916]: I1203 19:49:55.942191 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.083468 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-px5fk" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.229112 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39cbb12-e84d-4f98-a410-86f3103ea424","Type":"ContainerStarted","Data":"f76de2275956e7c7963c6e17153618e3f5173f757c906f457b56d523e3ef0373"} Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.230804 4916 generic.go:334] "Generic (PLEG): container finished" podID="cb050870-526a-4489-a369-88fe8f92a432" containerID="5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168" exitCode=0 Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.230879 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-px5fk" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.230905 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-px5fk" event={"ID":"cb050870-526a-4489-a369-88fe8f92a432","Type":"ContainerDied","Data":"5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168"} Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.230930 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-px5fk" event={"ID":"cb050870-526a-4489-a369-88fe8f92a432","Type":"ContainerDied","Data":"d68975feb43fce356f646a3eac5c91123e6e7e2e762084167aafc2da4f9687b1"} Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.230945 4916 scope.go:117] "RemoveContainer" containerID="5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.251096 4916 scope.go:117] "RemoveContainer" containerID="f2e5261c15edbf101d85b1cb1f8d12bc77e4b989a782c6c18ef4266ee705400b" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.256546 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-sb\") pod \"cb050870-526a-4489-a369-88fe8f92a432\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.257895 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-config\") pod \"cb050870-526a-4489-a369-88fe8f92a432\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.257980 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-nb\") pod \"cb050870-526a-4489-a369-88fe8f92a432\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.258003 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzksq\" (UniqueName: \"kubernetes.io/projected/cb050870-526a-4489-a369-88fe8f92a432-kube-api-access-pzksq\") pod \"cb050870-526a-4489-a369-88fe8f92a432\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.264507 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/cb050870-526a-4489-a369-88fe8f92a432-kube-api-access-pzksq" (OuterVolumeSpecName: "kube-api-access-pzksq") pod "cb050870-526a-4489-a369-88fe8f92a432" (UID: "cb050870-526a-4489-a369-88fe8f92a432"). InnerVolumeSpecName "kube-api-access-pzksq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.266109 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-swift-storage-0\") pod \"cb050870-526a-4489-a369-88fe8f92a432\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.266385 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-svc\") pod \"cb050870-526a-4489-a369-88fe8f92a432\" (UID: \"cb050870-526a-4489-a369-88fe8f92a432\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.267066 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzksq\" (UniqueName: \"kubernetes.io/projected/cb050870-526a-4489-a369-88fe8f92a432-kube-api-access-pzksq\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.278449 4916 scope.go:117] "RemoveContainer" containerID="5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168" Dec 03 19:49:56 crc kubenswrapper[4916]: E1203 19:49:56.289832 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168\": container with ID starting with 5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168 not found: ID does not exist" containerID="5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.289872 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168"} err="failed to get container status \"5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168\": rpc error: code = NotFound desc = could not find container \"5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168\": container with ID starting with 5dcc5aa43589b76de7a474304a6b0cc4b0c99bb2e10f7d4968594a5491e45168 not found: ID does not exist" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.289896 4916 scope.go:117] "RemoveContainer" containerID="f2e5261c15edbf101d85b1cb1f8d12bc77e4b989a782c6c18ef4266ee705400b" Dec 03 19:49:56 crc kubenswrapper[4916]: E1203 19:49:56.290290 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2e5261c15edbf101d85b1cb1f8d12bc77e4b989a782c6c18ef4266ee705400b\": container with ID starting with f2e5261c15edbf101d85b1cb1f8d12bc77e4b989a782c6c18ef4266ee705400b not found: ID does not exist" containerID="f2e5261c15edbf101d85b1cb1f8d12bc77e4b989a782c6c18ef4266ee705400b" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.290313 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2e5261c15edbf101d85b1cb1f8d12bc77e4b989a782c6c18ef4266ee705400b"} err="failed to get container status \"f2e5261c15edbf101d85b1cb1f8d12bc77e4b989a782c6c18ef4266ee705400b\": rpc error: code = NotFound 
desc = could not find container \"f2e5261c15edbf101d85b1cb1f8d12bc77e4b989a782c6c18ef4266ee705400b\": container with ID starting with f2e5261c15edbf101d85b1cb1f8d12bc77e4b989a782c6c18ef4266ee705400b not found: ID does not exist" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.323100 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7755d7d784-5sk27"] Dec 03 19:49:56 crc kubenswrapper[4916]: W1203 19:49:56.331223 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3d699e4_f5e8_4719_bc16_b5a85bcaa695.slice/crio-0461b702221725be1de2c6c9505faf81b008bd75e7d978a207fc9e7bb5beafce WatchSource:0}: Error finding container 0461b702221725be1de2c6c9505faf81b008bd75e7d978a207fc9e7bb5beafce: Status 404 returned error can't find the container with id 0461b702221725be1de2c6c9505faf81b008bd75e7d978a207fc9e7bb5beafce Dec 03 19:49:56 crc kubenswrapper[4916]: W1203 19:49:56.342443 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb33d04f2_ecc8_4c07_b258_60918f9aff05.slice/crio-ebbbdced986067d329e839e04abcfc1b6faec0aba9d3ec512e6c47a73133b219 WatchSource:0}: Error finding container ebbbdced986067d329e839e04abcfc1b6faec0aba9d3ec512e6c47a73133b219: Status 404 returned error can't find the container with id ebbbdced986067d329e839e04abcfc1b6faec0aba9d3ec512e6c47a73133b219 Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.343205 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "cb050870-526a-4489-a369-88fe8f92a432" (UID: "cb050870-526a-4489-a369-88fe8f92a432"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.350027 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-config" (OuterVolumeSpecName: "config") pod "cb050870-526a-4489-a369-88fe8f92a432" (UID: "cb050870-526a-4489-a369-88fe8f92a432"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.352284 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6c644b9d95-mhwlb"] Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.364336 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cb050870-526a-4489-a369-88fe8f92a432" (UID: "cb050870-526a-4489-a369-88fe8f92a432"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.368974 4916 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.369000 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.369010 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.375227 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cb050870-526a-4489-a369-88fe8f92a432" (UID: "cb050870-526a-4489-a369-88fe8f92a432"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.378495 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cb050870-526a-4489-a369-88fe8f92a432" (UID: "cb050870-526a-4489-a369-88fe8f92a432"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.452241 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-dl8tp"] Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.470748 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.470784 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb050870-526a-4489-a369-88fe8f92a432-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.556774 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-px5fk"] Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.565595 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-px5fk"] Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.587968 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:49:56 crc kubenswrapper[4916]: W1203 19:49:56.593967 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0252d76f_bb1d_4341_a584_660f1ecd343a.slice/crio-37af3923053ecfddba1cf4fc4f37a80c642d18678d28ec597783e58276926804 WatchSource:0}: Error finding container 37af3923053ecfddba1cf4fc4f37a80c642d18678d28ec597783e58276926804: Status 404 returned error can't find the container with id 37af3923053ecfddba1cf4fc4f37a80c642d18678d28ec597783e58276926804 Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.594763 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5d9557c554-w6j5d"] Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.675231 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-db-sync-config-data\") pod \"7eac938e-d147-4214-a0b1-4a17ac69b649\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.675276 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-scripts\") pod \"7eac938e-d147-4214-a0b1-4a17ac69b649\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.675406 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vlpl\" (UniqueName: \"kubernetes.io/projected/7eac938e-d147-4214-a0b1-4a17ac69b649-kube-api-access-4vlpl\") pod \"7eac938e-d147-4214-a0b1-4a17ac69b649\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.675429 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-config-data\") pod \"7eac938e-d147-4214-a0b1-4a17ac69b649\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.675467 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-combined-ca-bundle\") pod \"7eac938e-d147-4214-a0b1-4a17ac69b649\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.675533 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eac938e-d147-4214-a0b1-4a17ac69b649-etc-machine-id\") pod \"7eac938e-d147-4214-a0b1-4a17ac69b649\" (UID: \"7eac938e-d147-4214-a0b1-4a17ac69b649\") " Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.675952 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7eac938e-d147-4214-a0b1-4a17ac69b649-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7eac938e-d147-4214-a0b1-4a17ac69b649" (UID: "7eac938e-d147-4214-a0b1-4a17ac69b649"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.682233 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eac938e-d147-4214-a0b1-4a17ac69b649-kube-api-access-4vlpl" (OuterVolumeSpecName: "kube-api-access-4vlpl") pod "7eac938e-d147-4214-a0b1-4a17ac69b649" (UID: "7eac938e-d147-4214-a0b1-4a17ac69b649"). InnerVolumeSpecName "kube-api-access-4vlpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.682445 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-scripts" (OuterVolumeSpecName: "scripts") pod "7eac938e-d147-4214-a0b1-4a17ac69b649" (UID: "7eac938e-d147-4214-a0b1-4a17ac69b649"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.682720 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7eac938e-d147-4214-a0b1-4a17ac69b649" (UID: "7eac938e-d147-4214-a0b1-4a17ac69b649"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.699515 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7eac938e-d147-4214-a0b1-4a17ac69b649" (UID: "7eac938e-d147-4214-a0b1-4a17ac69b649"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.738462 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-config-data" (OuterVolumeSpecName: "config-data") pod "7eac938e-d147-4214-a0b1-4a17ac69b649" (UID: "7eac938e-d147-4214-a0b1-4a17ac69b649"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.777487 4916 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.777704 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.777772 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vlpl\" (UniqueName: \"kubernetes.io/projected/7eac938e-d147-4214-a0b1-4a17ac69b649-kube-api-access-4vlpl\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.777824 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.777872 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eac938e-d147-4214-a0b1-4a17ac69b649-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:56 crc kubenswrapper[4916]: I1203 19:49:56.777921 4916 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7eac938e-d147-4214-a0b1-4a17ac69b649-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.263302 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-l4sh2" event={"ID":"7eac938e-d147-4214-a0b1-4a17ac69b649","Type":"ContainerDied","Data":"c9d148e8225f6114f6ca571f07be2155b40da1ed393c3427ad5744ac0a116181"} Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.263581 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9d148e8225f6114f6ca571f07be2155b40da1ed393c3427ad5744ac0a116181" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.263591 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-l4sh2" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.267958 4916 generic.go:334] "Generic (PLEG): container finished" podID="0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" containerID="ec0a3eb6205eaca1408202a31fc3ff6d296739d13af0853ee2a0b4f3747dfbe7" exitCode=0 Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.268002 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" event={"ID":"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60","Type":"ContainerDied","Data":"ec0a3eb6205eaca1408202a31fc3ff6d296739d13af0853ee2a0b4f3747dfbe7"} Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.268018 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" event={"ID":"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60","Type":"ContainerStarted","Data":"1f5914b2eb8731df26b2e9f90db1abc505923ca397ec34d3568e5859134c62d1"} Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.274981 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c644b9d95-mhwlb" event={"ID":"b33d04f2-ecc8-4c07-b258-60918f9aff05","Type":"ContainerStarted","Data":"ebbbdced986067d329e839e04abcfc1b6faec0aba9d3ec512e6c47a73133b219"} Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.279166 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" event={"ID":"c3d699e4-f5e8-4719-bc16-b5a85bcaa695","Type":"ContainerStarted","Data":"0461b702221725be1de2c6c9505faf81b008bd75e7d978a207fc9e7bb5beafce"} Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.286207 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d9557c554-w6j5d" event={"ID":"0252d76f-bb1d-4341-a584-660f1ecd343a","Type":"ContainerStarted","Data":"2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f"} Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.286248 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d9557c554-w6j5d" event={"ID":"0252d76f-bb1d-4341-a584-660f1ecd343a","Type":"ContainerStarted","Data":"da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72"} Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.286258 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d9557c554-w6j5d" event={"ID":"0252d76f-bb1d-4341-a584-660f1ecd343a","Type":"ContainerStarted","Data":"37af3923053ecfddba1cf4fc4f37a80c642d18678d28ec597783e58276926804"} Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.287047 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.287069 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.291966 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39cbb12-e84d-4f98-a410-86f3103ea424","Type":"ContainerStarted","Data":"8e807111b2848e32feeea2d9cc9ea59650a097865daa18487499b00bd599346a"} Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.320104 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5d9557c554-w6j5d" podStartSLOduration=2.320086748 podStartE2EDuration="2.320086748s" podCreationTimestamp="2025-12-03 19:49:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:49:57.31189893 +0000 UTC m=+1213.274709196" watchObservedRunningTime="2025-12-03 19:49:57.320086748 +0000 UTC m=+1213.282897014" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.495492 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 19:49:57 crc kubenswrapper[4916]: E1203 19:49:57.495960 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb050870-526a-4489-a369-88fe8f92a432" containerName="dnsmasq-dns" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.495984 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb050870-526a-4489-a369-88fe8f92a432" containerName="dnsmasq-dns" Dec 03 19:49:57 crc kubenswrapper[4916]: E1203 19:49:57.496005 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eac938e-d147-4214-a0b1-4a17ac69b649" containerName="cinder-db-sync" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.496013 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eac938e-d147-4214-a0b1-4a17ac69b649" containerName="cinder-db-sync" Dec 03 19:49:57 crc kubenswrapper[4916]: E1203 19:49:57.496032 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb050870-526a-4489-a369-88fe8f92a432" containerName="init" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.496039 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb050870-526a-4489-a369-88fe8f92a432" containerName="init" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.498386 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb050870-526a-4489-a369-88fe8f92a432" containerName="dnsmasq-dns" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.498442 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eac938e-d147-4214-a0b1-4a17ac69b649" containerName="cinder-db-sync" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.499710 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.507406 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-xrjfc" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.509101 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.515221 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.515423 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.515533 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.622930 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.623049 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecd942d9-0c00-4f99-9473-fda757de6e65-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.623137 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.626127 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-scripts\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.626240 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs6vg\" (UniqueName: \"kubernetes.io/projected/ecd942d9-0c00-4f99-9473-fda757de6e65-kube-api-access-bs6vg\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.626306 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.683528 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-dl8tp"] Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.729197 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs6vg\" 
(UniqueName: \"kubernetes.io/projected/ecd942d9-0c00-4f99-9473-fda757de6e65-kube-api-access-bs6vg\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.729240 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.729287 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.729324 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecd942d9-0c00-4f99-9473-fda757de6e65-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.730377 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecd942d9-0c00-4f99-9473-fda757de6e65-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.730425 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.730502 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-scripts\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.744445 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-hzm9s"] Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.746043 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.748953 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.753964 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.754779 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.755045 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-scripts\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.765721 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-hzm9s"] Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.795127 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs6vg\" (UniqueName: \"kubernetes.io/projected/ecd942d9-0c00-4f99-9473-fda757de6e65-kube-api-access-bs6vg\") pod \"cinder-scheduler-0\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.834343 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-config\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.834399 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.834454 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqz42\" (UniqueName: \"kubernetes.io/projected/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-kube-api-access-vqz42\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.834500 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-svc\") pod 
\"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.834552 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.834612 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.855408 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.855623 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.870761 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.877652 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.891488 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936594 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936647 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936669 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wllp4\" (UniqueName: \"kubernetes.io/projected/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-kube-api-access-wllp4\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936716 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-config\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936742 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-swift-storage-0\") pod 
\"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936764 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data-custom\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936801 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqz42\" (UniqueName: \"kubernetes.io/projected/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-kube-api-access-vqz42\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936838 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936861 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-scripts\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936879 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-etc-machine-id\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936901 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936925 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.936944 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-logs\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.937942 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-config\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 
19:49:57.941445 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.941923 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.942834 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.943091 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:57 crc kubenswrapper[4916]: I1203 19:49:57.979451 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqz42\" (UniqueName: \"kubernetes.io/projected/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-kube-api-access-vqz42\") pod \"dnsmasq-dns-5c9776ccc5-hzm9s\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.038793 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-scripts\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.038838 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-etc-machine-id\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.038864 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.038901 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-logs\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.038934 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-etc-machine-id\") pod \"cinder-api-0\" (UID: 
\"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.038961 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.038979 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wllp4\" (UniqueName: \"kubernetes.io/projected/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-kube-api-access-wllp4\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.039056 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data-custom\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.039591 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-logs\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.043117 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.044019 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-scripts\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.044917 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data-custom\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.046795 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.056383 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wllp4\" (UniqueName: \"kubernetes.io/projected/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-kube-api-access-wllp4\") pod \"cinder-api-0\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.233582 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.248989 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 19:49:58 crc kubenswrapper[4916]: I1203 19:49:58.510126 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb050870-526a-4489-a369-88fe8f92a432" path="/var/lib/kubelet/pods/cb050870-526a-4489-a369-88fe8f92a432/volumes" Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.132789 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 19:49:59 crc kubenswrapper[4916]: W1203 19:49:59.138233 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podecd942d9_0c00_4f99_9473_fda757de6e65.slice/crio-482d30d1226b198ad07f5cfb38a70969c77f3f42214954b5d050b2fce13b5d8e WatchSource:0}: Error finding container 482d30d1226b198ad07f5cfb38a70969c77f3f42214954b5d050b2fce13b5d8e: Status 404 returned error can't find the container with id 482d30d1226b198ad07f5cfb38a70969c77f3f42214954b5d050b2fce13b5d8e Dec 03 19:49:59 crc kubenswrapper[4916]: W1203 19:49:59.220658 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87ff7ec0_4c1d_46c2_a7aa_ed52b6e5482d.slice/crio-15080ca14742f37b1f2568f5c62d7ae5415500d6e349ef9ad4936b4587a6addd WatchSource:0}: Error finding container 15080ca14742f37b1f2568f5c62d7ae5415500d6e349ef9ad4936b4587a6addd: Status 404 returned error can't find the container with id 15080ca14742f37b1f2568f5c62d7ae5415500d6e349ef9ad4936b4587a6addd Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.223627 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.235512 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-hzm9s"] Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.323451 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647","Type":"ContainerStarted","Data":"ee192c056aeef5a2242b3fa363db72936ca9e9b3da886f94955d1db48a077f80"} Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.340855 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39cbb12-e84d-4f98-a410-86f3103ea424","Type":"ContainerStarted","Data":"fcb50210936835d5b058d51fee8e00f49e0cf13eaeb451f09be4424bcc279402"} Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.341374 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.346435 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" event={"ID":"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60","Type":"ContainerStarted","Data":"85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2"} Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.346540 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" podUID="0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" containerName="dnsmasq-dns" containerID="cri-o://85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2" gracePeriod=10 Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.346626 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.353848 4916 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c644b9d95-mhwlb" event={"ID":"b33d04f2-ecc8-4c07-b258-60918f9aff05","Type":"ContainerStarted","Data":"8b58ae5785b0e46d53f167c750588eb963a1796e6525041a3963e224c07ece47"} Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.362640 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" event={"ID":"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d","Type":"ContainerStarted","Data":"15080ca14742f37b1f2568f5c62d7ae5415500d6e349ef9ad4936b4587a6addd"} Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.363216 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.129729957 podStartE2EDuration="6.363201445s" podCreationTimestamp="2025-12-03 19:49:53 +0000 UTC" firstStartedPulling="2025-12-03 19:49:54.47926981 +0000 UTC m=+1210.442080076" lastFinishedPulling="2025-12-03 19:49:58.712741278 +0000 UTC m=+1214.675551564" observedRunningTime="2025-12-03 19:49:59.361358656 +0000 UTC m=+1215.324168922" watchObservedRunningTime="2025-12-03 19:49:59.363201445 +0000 UTC m=+1215.326011711" Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.372967 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" event={"ID":"c3d699e4-f5e8-4719-bc16-b5a85bcaa695","Type":"ContainerStarted","Data":"9eda7dc893efc6d8882c491c8c374ca153aa26c8d9498fcb6334e3ccaa5f8d58"} Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.383807 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" podStartSLOduration=4.383790184 podStartE2EDuration="4.383790184s" podCreationTimestamp="2025-12-03 19:49:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:49:59.375479602 +0000 UTC m=+1215.338289868" watchObservedRunningTime="2025-12-03 19:49:59.383790184 +0000 UTC m=+1215.346600440" Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.387416 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd942d9-0c00-4f99-9473-fda757de6e65","Type":"ContainerStarted","Data":"482d30d1226b198ad07f5cfb38a70969c77f3f42214954b5d050b2fce13b5d8e"} Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.809293 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.990032 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-svc\") pod \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.990116 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-swift-storage-0\") pod \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.990195 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-config\") pod \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.990236 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b4vnc\" (UniqueName: \"kubernetes.io/projected/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-kube-api-access-b4vnc\") pod \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.990305 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-sb\") pod \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.990333 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-nb\") pod \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\" (UID: \"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60\") " Dec 03 19:49:59 crc kubenswrapper[4916]: I1203 19:49:59.998970 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-kube-api-access-b4vnc" (OuterVolumeSpecName: "kube-api-access-b4vnc") pod "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" (UID: "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60"). InnerVolumeSpecName "kube-api-access-b4vnc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.043472 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-config" (OuterVolumeSpecName: "config") pod "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" (UID: "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.047464 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" (UID: "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.070798 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" (UID: "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.072649 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" (UID: "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.092593 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.092629 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.092644 4916 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.092657 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.092668 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b4vnc\" (UniqueName: \"kubernetes.io/projected/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-kube-api-access-b4vnc\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.097905 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" (UID: "0adc6844-f1ee-468d-abd3-bfc6f3f3bb60"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.194747 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.426813 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647","Type":"ContainerStarted","Data":"3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4"} Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.428726 4916 generic.go:334] "Generic (PLEG): container finished" podID="0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" containerID="85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2" exitCode=0 Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.428770 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" event={"ID":"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60","Type":"ContainerDied","Data":"85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2"} Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.428787 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" event={"ID":"0adc6844-f1ee-468d-abd3-bfc6f3f3bb60","Type":"ContainerDied","Data":"1f5914b2eb8731df26b2e9f90db1abc505923ca397ec34d3568e5859134c62d1"} Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.428802 4916 scope.go:117] "RemoveContainer" containerID="85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.428900 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-dl8tp" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.442137 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c644b9d95-mhwlb" event={"ID":"b33d04f2-ecc8-4c07-b258-60918f9aff05","Type":"ContainerStarted","Data":"3c081bb16a1df8af85b151f70290c3729049946b64c0c1f7bc38566caba7bb9b"} Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.444331 4916 generic.go:334] "Generic (PLEG): container finished" podID="87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" containerID="0a9df916a95089eeae26a6141af41114e2cab992b21bf3db343a0eea435ee1ea" exitCode=0 Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.444408 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" event={"ID":"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d","Type":"ContainerDied","Data":"0a9df916a95089eeae26a6141af41114e2cab992b21bf3db343a0eea435ee1ea"} Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.497961 4916 scope.go:117] "RemoveContainer" containerID="ec0a3eb6205eaca1408202a31fc3ff6d296739d13af0853ee2a0b4f3747dfbe7" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.502457 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" event={"ID":"c3d699e4-f5e8-4719-bc16-b5a85bcaa695","Type":"ContainerStarted","Data":"d9ee5988c2730dc395b3c5f716c113aa9b63b0c00f1a359c23c52918d4d64207"} Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.517248 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6c644b9d95-mhwlb" podStartSLOduration=3.1594148 podStartE2EDuration="5.517231263s" podCreationTimestamp="2025-12-03 19:49:55 +0000 UTC" firstStartedPulling="2025-12-03 19:49:56.351206195 +0000 UTC m=+1212.314016451" lastFinishedPulling="2025-12-03 19:49:58.709022648 +0000 UTC m=+1214.671832914" observedRunningTime="2025-12-03 19:50:00.46346404 +0000 UTC m=+1216.426274306" watchObservedRunningTime="2025-12-03 19:50:00.517231263 +0000 UTC m=+1216.480041529" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.531622 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-dl8tp"] Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.537664 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-dl8tp"] Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.540079 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7755d7d784-5sk27" podStartSLOduration=3.174567793 podStartE2EDuration="5.540060142s" podCreationTimestamp="2025-12-03 19:49:55 +0000 UTC" firstStartedPulling="2025-12-03 19:49:56.342309537 +0000 UTC m=+1212.305119803" lastFinishedPulling="2025-12-03 19:49:58.707801886 +0000 UTC m=+1214.670612152" observedRunningTime="2025-12-03 19:50:00.51787081 +0000 UTC m=+1216.480681076" watchObservedRunningTime="2025-12-03 19:50:00.540060142 +0000 UTC m=+1216.502870408" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.559738 4916 scope.go:117] "RemoveContainer" containerID="85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2" Dec 03 19:50:00 crc kubenswrapper[4916]: E1203 19:50:00.561764 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2\": container with ID starting with 
85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2 not found: ID does not exist" containerID="85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.561811 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2"} err="failed to get container status \"85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2\": rpc error: code = NotFound desc = could not find container \"85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2\": container with ID starting with 85c24db23d41502546bb65be5072a7759d6eef5329794512bec6ff9aa3f2dbe2 not found: ID does not exist" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.561835 4916 scope.go:117] "RemoveContainer" containerID="ec0a3eb6205eaca1408202a31fc3ff6d296739d13af0853ee2a0b4f3747dfbe7" Dec 03 19:50:00 crc kubenswrapper[4916]: E1203 19:50:00.562087 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec0a3eb6205eaca1408202a31fc3ff6d296739d13af0853ee2a0b4f3747dfbe7\": container with ID starting with ec0a3eb6205eaca1408202a31fc3ff6d296739d13af0853ee2a0b4f3747dfbe7 not found: ID does not exist" containerID="ec0a3eb6205eaca1408202a31fc3ff6d296739d13af0853ee2a0b4f3747dfbe7" Dec 03 19:50:00 crc kubenswrapper[4916]: I1203 19:50:00.562104 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec0a3eb6205eaca1408202a31fc3ff6d296739d13af0853ee2a0b4f3747dfbe7"} err="failed to get container status \"ec0a3eb6205eaca1408202a31fc3ff6d296739d13af0853ee2a0b4f3747dfbe7\": rpc error: code = NotFound desc = could not find container \"ec0a3eb6205eaca1408202a31fc3ff6d296739d13af0853ee2a0b4f3747dfbe7\": container with ID starting with ec0a3eb6205eaca1408202a31fc3ff6d296739d13af0853ee2a0b4f3747dfbe7 not found: ID does not exist" Dec 03 19:50:01 crc kubenswrapper[4916]: I1203 19:50:01.498457 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd942d9-0c00-4f99-9473-fda757de6e65","Type":"ContainerStarted","Data":"bce7941ea05c0d81c369d47303b1acca0c3067ff6404847a7dd32a88c96fc6aa"} Dec 03 19:50:01 crc kubenswrapper[4916]: I1203 19:50:01.498983 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd942d9-0c00-4f99-9473-fda757de6e65","Type":"ContainerStarted","Data":"2e540a5688288632acc054191995ed9e07b5605662c79481a17791728a86b097"} Dec 03 19:50:01 crc kubenswrapper[4916]: I1203 19:50:01.502179 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647","Type":"ContainerStarted","Data":"299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917"} Dec 03 19:50:01 crc kubenswrapper[4916]: I1203 19:50:01.502823 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 19:50:01 crc kubenswrapper[4916]: I1203 19:50:01.506303 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" event={"ID":"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d","Type":"ContainerStarted","Data":"4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8"} Dec 03 19:50:01 crc kubenswrapper[4916]: I1203 19:50:01.520172 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/cinder-scheduler-0" podStartSLOduration=3.75271372 podStartE2EDuration="4.520157055s" podCreationTimestamp="2025-12-03 19:49:57 +0000 UTC" firstStartedPulling="2025-12-03 19:49:59.140240632 +0000 UTC m=+1215.103050888" lastFinishedPulling="2025-12-03 19:49:59.907683937 +0000 UTC m=+1215.870494223" observedRunningTime="2025-12-03 19:50:01.517000651 +0000 UTC m=+1217.479810927" watchObservedRunningTime="2025-12-03 19:50:01.520157055 +0000 UTC m=+1217.482967311" Dec 03 19:50:01 crc kubenswrapper[4916]: I1203 19:50:01.543676 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" podStartSLOduration=4.543654801 podStartE2EDuration="4.543654801s" podCreationTimestamp="2025-12-03 19:49:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:01.536160211 +0000 UTC m=+1217.498970487" watchObservedRunningTime="2025-12-03 19:50:01.543654801 +0000 UTC m=+1217.506465067" Dec 03 19:50:01 crc kubenswrapper[4916]: I1203 19:50:01.561015 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.561000163 podStartE2EDuration="4.561000163s" podCreationTimestamp="2025-12-03 19:49:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:01.560818599 +0000 UTC m=+1217.523628875" watchObservedRunningTime="2025-12-03 19:50:01.561000163 +0000 UTC m=+1217.523810419" Dec 03 19:50:01 crc kubenswrapper[4916]: I1203 19:50:01.956581 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.341956 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-f787c8578-2cjjd"] Dec 03 19:50:02 crc kubenswrapper[4916]: E1203 19:50:02.342778 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" containerName="init" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.342808 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" containerName="init" Dec 03 19:50:02 crc kubenswrapper[4916]: E1203 19:50:02.342822 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" containerName="dnsmasq-dns" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.342831 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" containerName="dnsmasq-dns" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.343051 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" containerName="dnsmasq-dns" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.344168 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.346651 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.346848 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.369244 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-f787c8578-2cjjd"] Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.486665 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0adc6844-f1ee-468d-abd3-bfc6f3f3bb60" path="/var/lib/kubelet/pods/0adc6844-f1ee-468d-abd3-bfc6f3f3bb60/volumes" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.513177 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.534593 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-internal-tls-certs\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.534641 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-combined-ca-bundle\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.534676 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-config-data\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.534705 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w78xp\" (UniqueName: \"kubernetes.io/projected/eda087f8-dbb4-47ca-a210-576abc73a55e-kube-api-access-w78xp\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.534733 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-config-data-custom\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.534778 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-public-tls-certs\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.534793 4916 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eda087f8-dbb4-47ca-a210-576abc73a55e-logs\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.637775 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-config-data-custom\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.637994 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-public-tls-certs\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.638025 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eda087f8-dbb4-47ca-a210-576abc73a55e-logs\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.638205 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-internal-tls-certs\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.638256 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-combined-ca-bundle\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.638342 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-config-data\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.638392 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w78xp\" (UniqueName: \"kubernetes.io/projected/eda087f8-dbb4-47ca-a210-576abc73a55e-kube-api-access-w78xp\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.640366 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eda087f8-dbb4-47ca-a210-576abc73a55e-logs\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.647847 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-public-tls-certs\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.648266 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-combined-ca-bundle\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.648599 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-config-data-custom\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.655957 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-internal-tls-certs\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.657741 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eda087f8-dbb4-47ca-a210-576abc73a55e-config-data\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.662633 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w78xp\" (UniqueName: \"kubernetes.io/projected/eda087f8-dbb4-47ca-a210-576abc73a55e-kube-api-access-w78xp\") pod \"barbican-api-f787c8578-2cjjd\" (UID: \"eda087f8-dbb4-47ca-a210-576abc73a55e\") " pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.856690 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 19:50:02 crc kubenswrapper[4916]: I1203 19:50:02.961003 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:03 crc kubenswrapper[4916]: W1203 19:50:03.492722 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeda087f8_dbb4_47ca_a210_576abc73a55e.slice/crio-edf500bba19fa184289079ccebf6f791f05abd3999548b8c298674ea45a9ce99 WatchSource:0}: Error finding container edf500bba19fa184289079ccebf6f791f05abd3999548b8c298674ea45a9ce99: Status 404 returned error can't find the container with id edf500bba19fa184289079ccebf6f791f05abd3999548b8c298674ea45a9ce99 Dec 03 19:50:03 crc kubenswrapper[4916]: I1203 19:50:03.496012 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-f787c8578-2cjjd"] Dec 03 19:50:03 crc kubenswrapper[4916]: I1203 19:50:03.535801 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f787c8578-2cjjd" event={"ID":"eda087f8-dbb4-47ca-a210-576abc73a55e","Type":"ContainerStarted","Data":"edf500bba19fa184289079ccebf6f791f05abd3999548b8c298674ea45a9ce99"} Dec 03 19:50:03 crc kubenswrapper[4916]: I1203 19:50:03.536948 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" containerName="cinder-api-log" containerID="cri-o://3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4" gracePeriod=30 Dec 03 19:50:03 crc kubenswrapper[4916]: I1203 19:50:03.537469 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" containerName="cinder-api" containerID="cri-o://299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917" gracePeriod=30 Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.083547 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.181096 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-scripts\") pod \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.181162 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-etc-machine-id\") pod \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.181192 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data-custom\") pod \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.181221 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data\") pod \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.181285 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-logs\") pod \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.181308 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wllp4\" (UniqueName: \"kubernetes.io/projected/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-kube-api-access-wllp4\") pod \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.181359 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-combined-ca-bundle\") pod \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\" (UID: \"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647\") " Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.184124 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-logs" (OuterVolumeSpecName: "logs") pod "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" (UID: "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.186688 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" (UID: "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.193976 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" (UID: "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.194457 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-kube-api-access-wllp4" (OuterVolumeSpecName: "kube-api-access-wllp4") pod "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" (UID: "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647"). InnerVolumeSpecName "kube-api-access-wllp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.204655 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-scripts" (OuterVolumeSpecName: "scripts") pod "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" (UID: "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.219737 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" (UID: "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.250510 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data" (OuterVolumeSpecName: "config-data") pod "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" (UID: "eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.283116 4916 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.283151 4916 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.283161 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.283172 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-logs\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.283183 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wllp4\" (UniqueName: \"kubernetes.io/projected/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-kube-api-access-wllp4\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.283193 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.283200 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.553731 4916 generic.go:334] "Generic (PLEG): container finished" podID="eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" containerID="299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917" exitCode=0 Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.553770 4916 generic.go:334] "Generic (PLEG): container finished" podID="eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" containerID="3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4" exitCode=143 Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.553804 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.553845 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647","Type":"ContainerDied","Data":"299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917"} Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.553881 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647","Type":"ContainerDied","Data":"3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4"} Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.553897 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647","Type":"ContainerDied","Data":"ee192c056aeef5a2242b3fa363db72936ca9e9b3da886f94955d1db48a077f80"} Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.553919 4916 scope.go:117] "RemoveContainer" containerID="299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.557840 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f787c8578-2cjjd" event={"ID":"eda087f8-dbb4-47ca-a210-576abc73a55e","Type":"ContainerStarted","Data":"56c45e6297ddd04e83b80fd5de753a71748b1d4af09324f075b5dfa184acadfc"} Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.557877 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f787c8578-2cjjd" event={"ID":"eda087f8-dbb4-47ca-a210-576abc73a55e","Type":"ContainerStarted","Data":"be6d4bbf1c3841f410331bbfcce148ccaad503dafd9af4044a68c8f53fba7bea"} Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.558037 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.583920 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.603947 4916 scope.go:117] "RemoveContainer" containerID="3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.612603 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.626880 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 19:50:04 crc kubenswrapper[4916]: E1203 19:50:04.627350 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" containerName="cinder-api-log" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.627373 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" containerName="cinder-api-log" Dec 03 19:50:04 crc kubenswrapper[4916]: E1203 19:50:04.627400 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" containerName="cinder-api" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.627409 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" containerName="cinder-api" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.627663 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" containerName="cinder-api-log" Dec 03 19:50:04 crc 
kubenswrapper[4916]: I1203 19:50:04.627710 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" containerName="cinder-api" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.628841 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.633285 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.633812 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.635166 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.636487 4916 scope.go:117] "RemoveContainer" containerID="299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.638658 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-f787c8578-2cjjd" podStartSLOduration=2.638638283 podStartE2EDuration="2.638638283s" podCreationTimestamp="2025-12-03 19:50:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:04.598162415 +0000 UTC m=+1220.560972691" watchObservedRunningTime="2025-12-03 19:50:04.638638283 +0000 UTC m=+1220.601448549" Dec 03 19:50:04 crc kubenswrapper[4916]: E1203 19:50:04.639629 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917\": container with ID starting with 299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917 not found: ID does not exist" containerID="299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.639661 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917"} err="failed to get container status \"299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917\": rpc error: code = NotFound desc = could not find container \"299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917\": container with ID starting with 299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917 not found: ID does not exist" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.639684 4916 scope.go:117] "RemoveContainer" containerID="3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4" Dec 03 19:50:04 crc kubenswrapper[4916]: E1203 19:50:04.639966 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4\": container with ID starting with 3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4 not found: ID does not exist" containerID="3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.639988 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4"} err="failed to get 
container status \"3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4\": rpc error: code = NotFound desc = could not find container \"3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4\": container with ID starting with 3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4 not found: ID does not exist" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.640007 4916 scope.go:117] "RemoveContainer" containerID="299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.640272 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917"} err="failed to get container status \"299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917\": rpc error: code = NotFound desc = could not find container \"299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917\": container with ID starting with 299b2ed555100685fd59715cfb0acacea9c270eb167bc120040cdbe883950917 not found: ID does not exist" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.640295 4916 scope.go:117] "RemoveContainer" containerID="3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.643065 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4"} err="failed to get container status \"3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4\": rpc error: code = NotFound desc = could not find container \"3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4\": container with ID starting with 3b9a657549ff31f89c646f40289399bdf7b60f58f76729a6f09b15540d5be2e4 not found: ID does not exist" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.671774 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.691287 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-logs\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.691336 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-config-data-custom\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.691372 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nks6b\" (UniqueName: \"kubernetes.io/projected/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-kube-api-access-nks6b\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.691403 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-public-tls-certs\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 
crc kubenswrapper[4916]: I1203 19:50:04.691492 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.691554 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.691659 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-config-data\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.691879 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.691950 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-scripts\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.793739 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-config-data\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.794130 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.794260 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-scripts\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.794556 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-logs\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.794682 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-config-data-custom\") pod \"cinder-api-0\" (UID: 
\"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.794789 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nks6b\" (UniqueName: \"kubernetes.io/projected/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-kube-api-access-nks6b\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.794894 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-public-tls-certs\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.795036 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.795142 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-logs\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.795309 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.795451 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.799302 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-config-data-custom\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.799341 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.800469 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-config-data\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.800805 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-scripts\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " 
pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.801177 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-public-tls-certs\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.802121 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:04 crc kubenswrapper[4916]: I1203 19:50:04.832059 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nks6b\" (UniqueName: \"kubernetes.io/projected/b816c89d-8a9e-48c7-841a-dcb5ee7ab0df-kube-api-access-nks6b\") pod \"cinder-api-0\" (UID: \"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df\") " pod="openstack/cinder-api-0" Dec 03 19:50:05 crc kubenswrapper[4916]: I1203 19:50:05.000860 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 19:50:05 crc kubenswrapper[4916]: W1203 19:50:05.526125 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb816c89d_8a9e_48c7_841a_dcb5ee7ab0df.slice/crio-ec4dd11aaafbe79339c5aa125a9ae75865f4a9a529dac450d5b0028b2a082b28 WatchSource:0}: Error finding container ec4dd11aaafbe79339c5aa125a9ae75865f4a9a529dac450d5b0028b2a082b28: Status 404 returned error can't find the container with id ec4dd11aaafbe79339c5aa125a9ae75865f4a9a529dac450d5b0028b2a082b28 Dec 03 19:50:05 crc kubenswrapper[4916]: I1203 19:50:05.536751 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 19:50:05 crc kubenswrapper[4916]: I1203 19:50:05.573090 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df","Type":"ContainerStarted","Data":"ec4dd11aaafbe79339c5aa125a9ae75865f4a9a529dac450d5b0028b2a082b28"} Dec 03 19:50:05 crc kubenswrapper[4916]: I1203 19:50:05.575904 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-f787c8578-2cjjd" Dec 03 19:50:06 crc kubenswrapper[4916]: I1203 19:50:06.494320 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647" path="/var/lib/kubelet/pods/eab873f5-5fb7-4fa5-9bf5-2fc9a5d18647/volumes" Dec 03 19:50:06 crc kubenswrapper[4916]: I1203 19:50:06.587310 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df","Type":"ContainerStarted","Data":"da39e159b5e70a6be4c2feb597f09be0374ce76a2b246fe58c083c0a329d2daa"} Dec 03 19:50:07 crc kubenswrapper[4916]: I1203 19:50:07.228553 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:50:07 crc kubenswrapper[4916]: I1203 19:50:07.307891 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5d9557c554-w6j5d" Dec 03 19:50:07 crc kubenswrapper[4916]: I1203 19:50:07.596350 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"b816c89d-8a9e-48c7-841a-dcb5ee7ab0df","Type":"ContainerStarted","Data":"fdcbdf3fd61db177d929e2252e02dad7508780d7595c192d76007a563ee6d97f"} Dec 03 19:50:07 crc kubenswrapper[4916]: I1203 19:50:07.659878 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.659854029 podStartE2EDuration="3.659854029s" podCreationTimestamp="2025-12-03 19:50:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:07.650084759 +0000 UTC m=+1223.612895025" watchObservedRunningTime="2025-12-03 19:50:07.659854029 +0000 UTC m=+1223.622664295" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.063362 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.118255 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.234858 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.294284 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-gd75g"] Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.294500 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" podUID="ecf17f0e-cc04-4896-b816-a045e729e980" containerName="dnsmasq-dns" containerID="cri-o://0deeb30e4842d919816dae1ff103c485be5c52c50762213299185f14b3ee23d6" gracePeriod=10 Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.611076 4916 generic.go:334] "Generic (PLEG): container finished" podID="ecf17f0e-cc04-4896-b816-a045e729e980" containerID="0deeb30e4842d919816dae1ff103c485be5c52c50762213299185f14b3ee23d6" exitCode=0 Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.611325 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" event={"ID":"ecf17f0e-cc04-4896-b816-a045e729e980","Type":"ContainerDied","Data":"0deeb30e4842d919816dae1ff103c485be5c52c50762213299185f14b3ee23d6"} Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.611534 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ecd942d9-0c00-4f99-9473-fda757de6e65" containerName="cinder-scheduler" containerID="cri-o://2e540a5688288632acc054191995ed9e07b5605662c79481a17791728a86b097" gracePeriod=30 Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.611866 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.611919 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ecd942d9-0c00-4f99-9473-fda757de6e65" containerName="probe" containerID="cri-o://bce7941ea05c0d81c369d47303b1acca0c3067ff6404847a7dd32a88c96fc6aa" gracePeriod=30 Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.792297 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.871594 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-config\") pod \"ecf17f0e-cc04-4896-b816-a045e729e980\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.871730 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-swift-storage-0\") pod \"ecf17f0e-cc04-4896-b816-a045e729e980\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.871839 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-nb\") pod \"ecf17f0e-cc04-4896-b816-a045e729e980\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.871892 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-sb\") pod \"ecf17f0e-cc04-4896-b816-a045e729e980\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.871938 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-svc\") pod \"ecf17f0e-cc04-4896-b816-a045e729e980\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.871996 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9phn\" (UniqueName: \"kubernetes.io/projected/ecf17f0e-cc04-4896-b816-a045e729e980-kube-api-access-z9phn\") pod \"ecf17f0e-cc04-4896-b816-a045e729e980\" (UID: \"ecf17f0e-cc04-4896-b816-a045e729e980\") " Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.877773 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecf17f0e-cc04-4896-b816-a045e729e980-kube-api-access-z9phn" (OuterVolumeSpecName: "kube-api-access-z9phn") pod "ecf17f0e-cc04-4896-b816-a045e729e980" (UID: "ecf17f0e-cc04-4896-b816-a045e729e980"). InnerVolumeSpecName "kube-api-access-z9phn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.926423 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ecf17f0e-cc04-4896-b816-a045e729e980" (UID: "ecf17f0e-cc04-4896-b816-a045e729e980"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.940385 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ecf17f0e-cc04-4896-b816-a045e729e980" (UID: "ecf17f0e-cc04-4896-b816-a045e729e980"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.946001 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ecf17f0e-cc04-4896-b816-a045e729e980" (UID: "ecf17f0e-cc04-4896-b816-a045e729e980"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.954558 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-config" (OuterVolumeSpecName: "config") pod "ecf17f0e-cc04-4896-b816-a045e729e980" (UID: "ecf17f0e-cc04-4896-b816-a045e729e980"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.968969 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ecf17f0e-cc04-4896-b816-a045e729e980" (UID: "ecf17f0e-cc04-4896-b816-a045e729e980"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.974667 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.974735 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.974749 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.974763 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9phn\" (UniqueName: \"kubernetes.io/projected/ecf17f0e-cc04-4896-b816-a045e729e980-kube-api-access-z9phn\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.974777 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:08 crc kubenswrapper[4916]: I1203 19:50:08.974787 4916 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ecf17f0e-cc04-4896-b816-a045e729e980-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:09 crc kubenswrapper[4916]: I1203 19:50:09.620802 4916 generic.go:334] "Generic (PLEG): container finished" podID="ecd942d9-0c00-4f99-9473-fda757de6e65" containerID="bce7941ea05c0d81c369d47303b1acca0c3067ff6404847a7dd32a88c96fc6aa" exitCode=0 Dec 03 19:50:09 crc kubenswrapper[4916]: I1203 19:50:09.621074 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd942d9-0c00-4f99-9473-fda757de6e65","Type":"ContainerDied","Data":"bce7941ea05c0d81c369d47303b1acca0c3067ff6404847a7dd32a88c96fc6aa"} Dec 03 19:50:09 crc 
kubenswrapper[4916]: I1203 19:50:09.623371 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" event={"ID":"ecf17f0e-cc04-4896-b816-a045e729e980","Type":"ContainerDied","Data":"561d46ba13ecf638c5f349704de15f31f3d6dd41e29f76c1b4be064c9f76cf11"} Dec 03 19:50:09 crc kubenswrapper[4916]: I1203 19:50:09.623418 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-gd75g" Dec 03 19:50:09 crc kubenswrapper[4916]: I1203 19:50:09.623432 4916 scope.go:117] "RemoveContainer" containerID="0deeb30e4842d919816dae1ff103c485be5c52c50762213299185f14b3ee23d6" Dec 03 19:50:09 crc kubenswrapper[4916]: I1203 19:50:09.653575 4916 scope.go:117] "RemoveContainer" containerID="aa1de021f75c26fafc46a054f5d541f7131b792fee54762e4c45f9a9c482f0dd" Dec 03 19:50:09 crc kubenswrapper[4916]: I1203 19:50:09.658819 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-gd75g"] Dec 03 19:50:09 crc kubenswrapper[4916]: I1203 19:50:09.676609 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-gd75g"] Dec 03 19:50:10 crc kubenswrapper[4916]: I1203 19:50:10.273806 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:50:10 crc kubenswrapper[4916]: I1203 19:50:10.281019 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5747d5b464-dtdts" Dec 03 19:50:10 crc kubenswrapper[4916]: I1203 19:50:10.505114 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecf17f0e-cc04-4896-b816-a045e729e980" path="/var/lib/kubelet/pods/ecf17f0e-cc04-4896-b816-a045e729e980/volumes" Dec 03 19:50:10 crc kubenswrapper[4916]: I1203 19:50:10.649964 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7c48bb485f-tqvlz" Dec 03 19:50:11 crc kubenswrapper[4916]: I1203 19:50:11.679903 4916 generic.go:334] "Generic (PLEG): container finished" podID="ecd942d9-0c00-4f99-9473-fda757de6e65" containerID="2e540a5688288632acc054191995ed9e07b5605662c79481a17791728a86b097" exitCode=0 Dec 03 19:50:11 crc kubenswrapper[4916]: I1203 19:50:11.680104 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd942d9-0c00-4f99-9473-fda757de6e65","Type":"ContainerDied","Data":"2e540a5688288632acc054191995ed9e07b5605662c79481a17791728a86b097"} Dec 03 19:50:11 crc kubenswrapper[4916]: I1203 19:50:11.867628 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.052435 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecd942d9-0c00-4f99-9473-fda757de6e65-etc-machine-id\") pod \"ecd942d9-0c00-4f99-9473-fda757de6e65\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.052472 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-scripts\") pod \"ecd942d9-0c00-4f99-9473-fda757de6e65\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.052505 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data\") pod \"ecd942d9-0c00-4f99-9473-fda757de6e65\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.052535 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bs6vg\" (UniqueName: \"kubernetes.io/projected/ecd942d9-0c00-4f99-9473-fda757de6e65-kube-api-access-bs6vg\") pod \"ecd942d9-0c00-4f99-9473-fda757de6e65\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.052581 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ecd942d9-0c00-4f99-9473-fda757de6e65-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ecd942d9-0c00-4f99-9473-fda757de6e65" (UID: "ecd942d9-0c00-4f99-9473-fda757de6e65"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.052614 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-combined-ca-bundle\") pod \"ecd942d9-0c00-4f99-9473-fda757de6e65\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.052636 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data-custom\") pod \"ecd942d9-0c00-4f99-9473-fda757de6e65\" (UID: \"ecd942d9-0c00-4f99-9473-fda757de6e65\") " Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.052996 4916 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ecd942d9-0c00-4f99-9473-fda757de6e65-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.059172 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecd942d9-0c00-4f99-9473-fda757de6e65-kube-api-access-bs6vg" (OuterVolumeSpecName: "kube-api-access-bs6vg") pod "ecd942d9-0c00-4f99-9473-fda757de6e65" (UID: "ecd942d9-0c00-4f99-9473-fda757de6e65"). InnerVolumeSpecName "kube-api-access-bs6vg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.065334 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ecd942d9-0c00-4f99-9473-fda757de6e65" (UID: "ecd942d9-0c00-4f99-9473-fda757de6e65"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.067989 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-scripts" (OuterVolumeSpecName: "scripts") pod "ecd942d9-0c00-4f99-9473-fda757de6e65" (UID: "ecd942d9-0c00-4f99-9473-fda757de6e65"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.121202 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ecd942d9-0c00-4f99-9473-fda757de6e65" (UID: "ecd942d9-0c00-4f99-9473-fda757de6e65"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.154964 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.154999 4916 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.155013 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.155026 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bs6vg\" (UniqueName: \"kubernetes.io/projected/ecd942d9-0c00-4f99-9473-fda757de6e65-kube-api-access-bs6vg\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.189143 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data" (OuterVolumeSpecName: "config-data") pod "ecd942d9-0c00-4f99-9473-fda757de6e65" (UID: "ecd942d9-0c00-4f99-9473-fda757de6e65"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.256813 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd942d9-0c00-4f99-9473-fda757de6e65-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.692364 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ecd942d9-0c00-4f99-9473-fda757de6e65","Type":"ContainerDied","Data":"482d30d1226b198ad07f5cfb38a70969c77f3f42214954b5d050b2fce13b5d8e"} Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.692444 4916 scope.go:117] "RemoveContainer" containerID="bce7941ea05c0d81c369d47303b1acca0c3067ff6404847a7dd32a88c96fc6aa" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.692466 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.720967 4916 scope.go:117] "RemoveContainer" containerID="2e540a5688288632acc054191995ed9e07b5605662c79481a17791728a86b097" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.723100 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.732294 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.765075 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 19:50:12 crc kubenswrapper[4916]: E1203 19:50:12.765783 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecd942d9-0c00-4f99-9473-fda757de6e65" containerName="probe" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.765804 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecd942d9-0c00-4f99-9473-fda757de6e65" containerName="probe" Dec 03 19:50:12 crc kubenswrapper[4916]: E1203 19:50:12.765855 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecf17f0e-cc04-4896-b816-a045e729e980" containerName="dnsmasq-dns" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.765864 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecf17f0e-cc04-4896-b816-a045e729e980" containerName="dnsmasq-dns" Dec 03 19:50:12 crc kubenswrapper[4916]: E1203 19:50:12.765887 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecd942d9-0c00-4f99-9473-fda757de6e65" containerName="cinder-scheduler" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.765921 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecd942d9-0c00-4f99-9473-fda757de6e65" containerName="cinder-scheduler" Dec 03 19:50:12 crc kubenswrapper[4916]: E1203 19:50:12.765942 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecf17f0e-cc04-4896-b816-a045e729e980" containerName="init" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.765951 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecf17f0e-cc04-4896-b816-a045e729e980" containerName="init" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.768393 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecf17f0e-cc04-4896-b816-a045e729e980" containerName="dnsmasq-dns" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.768453 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecd942d9-0c00-4f99-9473-fda757de6e65" containerName="probe" Dec 03 
19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.768500 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecd942d9-0c00-4f99-9473-fda757de6e65" containerName="cinder-scheduler" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.769770 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.773972 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6c6cb986d4-9jlqh" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.774410 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.793104 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.872429 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.872574 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-scripts\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.872646 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gz4q8\" (UniqueName: \"kubernetes.io/projected/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-kube-api-access-gz4q8\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.872665 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-config-data\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.872682 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.872707 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0" Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.973820 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-scripts\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0" 
Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.973872 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gz4q8\" (UniqueName: \"kubernetes.io/projected/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-kube-api-access-gz4q8\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0"
Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.973896 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-config-data\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0"
Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.973916 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0"
Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.973945 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0"
Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.973992 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0"
Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.974072 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0"
Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.979213 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-config-data\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0"
Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.982287 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0"
Dec 03 19:50:12 crc kubenswrapper[4916]: I1203 19:50:12.997118 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-scripts\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.012792 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.013207 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gz4q8\" (UniqueName: \"kubernetes.io/projected/2727cbbe-cad4-47ff-b451-2f66b4f65bbf-kube-api-access-gz4q8\") pod \"cinder-scheduler-0\" (UID: \"2727cbbe-cad4-47ff-b451-2f66b4f65bbf\") " pod="openstack/cinder-scheduler-0"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.094559 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.571616 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 03 19:50:13 crc kubenswrapper[4916]: W1203 19:50:13.574839 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2727cbbe_cad4_47ff_b451_2f66b4f65bbf.slice/crio-3ffbca6fa270d7dfa6d3746b2b27708ef8f5a3dced7dc690266a8d13a74577b1 WatchSource:0}: Error finding container 3ffbca6fa270d7dfa6d3746b2b27708ef8f5a3dced7dc690266a8d13a74577b1: Status 404 returned error can't find the container with id 3ffbca6fa270d7dfa6d3746b2b27708ef8f5a3dced7dc690266a8d13a74577b1
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.721038 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2727cbbe-cad4-47ff-b451-2f66b4f65bbf","Type":"ContainerStarted","Data":"3ffbca6fa270d7dfa6d3746b2b27708ef8f5a3dced7dc690266a8d13a74577b1"}
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.861639 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.862880 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.867033 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.867202 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-j9p82"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.867206 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.902144 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.902212 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jw82\" (UniqueName: \"kubernetes.io/projected/6b5a6645-8305-4075-b2c2-a243645d7bf3-kube-api-access-4jw82\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.902335 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.902360 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config-secret\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:13 crc kubenswrapper[4916]: I1203 19:50:13.923641 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.003642 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.003686 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config-secret\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.003719 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.003751 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jw82\" (UniqueName: \"kubernetes.io/projected/6b5a6645-8305-4075-b2c2-a243645d7bf3-kube-api-access-4jw82\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.004974 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.007602 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config-secret\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.007893 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.026205 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jw82\" (UniqueName: \"kubernetes.io/projected/6b5a6645-8305-4075-b2c2-a243645d7bf3-kube-api-access-4jw82\") pod \"openstackclient\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " pod="openstack/openstackclient"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.184157 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.522640 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecd942d9-0c00-4f99-9473-fda757de6e65" path="/var/lib/kubelet/pods/ecd942d9-0c00-4f99-9473-fda757de6e65/volumes"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.672859 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-f787c8578-2cjjd"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.679124 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.716173 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6b599f5db5-cs2bs"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.731934 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6b5a6645-8305-4075-b2c2-a243645d7bf3","Type":"ContainerStarted","Data":"b62dfc50159e011234254fe68cab0934deaaa5e64f854b00fc518730c4eba815"}
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.734212 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2727cbbe-cad4-47ff-b451-2f66b4f65bbf","Type":"ContainerStarted","Data":"9649c68139b17bfdc231ace23fbebd081f504011daf19a0e3e4c62e58aec0330"}
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.783007 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6c6cb986d4-9jlqh"]
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.783252 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6c6cb986d4-9jlqh" podUID="cbf2db2c-002e-431b-9238-071ac2e81d4d" containerName="neutron-api" containerID="cri-o://72e45ef09758b2d244eb02d9ea27fedb602fc0ad50dd520de1d82f9df2d2d07e" gracePeriod=30
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.783621 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6c6cb986d4-9jlqh" podUID="cbf2db2c-002e-431b-9238-071ac2e81d4d" containerName="neutron-httpd" containerID="cri-o://422309a2a61c801173de90807be0619c19db943ae3e5888ffa1bf023b2730ad6" gracePeriod=30
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.852355 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-f787c8578-2cjjd"
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.915805 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5d9557c554-w6j5d"]
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.916083 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5d9557c554-w6j5d" podUID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerName="barbican-api-log" containerID="cri-o://da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72" gracePeriod=30
Dec 03 19:50:14 crc kubenswrapper[4916]: I1203 19:50:14.917080 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5d9557c554-w6j5d" podUID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerName="barbican-api" containerID="cri-o://2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f" gracePeriod=30
Dec 03 19:50:15 crc kubenswrapper[4916]: I1203 19:50:15.756710 4916 generic.go:334] "Generic (PLEG): container finished" podID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerID="da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72" exitCode=143
Dec 03 19:50:15 crc kubenswrapper[4916]: I1203 19:50:15.756813 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d9557c554-w6j5d" event={"ID":"0252d76f-bb1d-4341-a584-660f1ecd343a","Type":"ContainerDied","Data":"da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72"}
Dec 03 19:50:15 crc kubenswrapper[4916]: I1203 19:50:15.769680 4916 generic.go:334] "Generic (PLEG): container finished" podID="cbf2db2c-002e-431b-9238-071ac2e81d4d" containerID="422309a2a61c801173de90807be0619c19db943ae3e5888ffa1bf023b2730ad6" exitCode=0
Dec 03 19:50:15 crc kubenswrapper[4916]: I1203 19:50:15.769761 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c6cb986d4-9jlqh" event={"ID":"cbf2db2c-002e-431b-9238-071ac2e81d4d","Type":"ContainerDied","Data":"422309a2a61c801173de90807be0619c19db943ae3e5888ffa1bf023b2730ad6"}
Dec 03 19:50:15 crc kubenswrapper[4916]: I1203 19:50:15.772234 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2727cbbe-cad4-47ff-b451-2f66b4f65bbf","Type":"ContainerStarted","Data":"a8afc1950c855efa5406865fd0fc388ced389339b18a51f1af9177cd7006a369"}
Dec 03 19:50:15 crc kubenswrapper[4916]: I1203 19:50:15.791723 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.791705262 podStartE2EDuration="3.791705262s" podCreationTimestamp="2025-12-03 19:50:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:15.789473982 +0000 UTC m=+1231.752284248" watchObservedRunningTime="2025-12-03 19:50:15.791705262 +0000 UTC m=+1231.754515528"
Dec 03 19:50:16 crc kubenswrapper[4916]: I1203 19:50:16.782332 4916 generic.go:334] "Generic (PLEG): container finished" podID="cbf2db2c-002e-431b-9238-071ac2e81d4d" containerID="72e45ef09758b2d244eb02d9ea27fedb602fc0ad50dd520de1d82f9df2d2d07e" exitCode=0
Dec 03 19:50:16 crc kubenswrapper[4916]: I1203 19:50:16.782390 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c6cb986d4-9jlqh" event={"ID":"cbf2db2c-002e-431b-9238-071ac2e81d4d","Type":"ContainerDied","Data":"72e45ef09758b2d244eb02d9ea27fedb602fc0ad50dd520de1d82f9df2d2d07e"}
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.277750 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6c6cb986d4-9jlqh"
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.376697 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.384148 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-ovndb-tls-certs\") pod \"cbf2db2c-002e-431b-9238-071ac2e81d4d\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") "
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.384229 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-combined-ca-bundle\") pod \"cbf2db2c-002e-431b-9238-071ac2e81d4d\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") "
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.384296 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-httpd-config\") pod \"cbf2db2c-002e-431b-9238-071ac2e81d4d\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") "
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.384338 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7xxt\" (UniqueName: \"kubernetes.io/projected/cbf2db2c-002e-431b-9238-071ac2e81d4d-kube-api-access-f7xxt\") pod \"cbf2db2c-002e-431b-9238-071ac2e81d4d\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") "
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.384466 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-config\") pod \"cbf2db2c-002e-431b-9238-071ac2e81d4d\" (UID: \"cbf2db2c-002e-431b-9238-071ac2e81d4d\") "
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.391117 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbf2db2c-002e-431b-9238-071ac2e81d4d-kube-api-access-f7xxt" (OuterVolumeSpecName: "kube-api-access-f7xxt") pod "cbf2db2c-002e-431b-9238-071ac2e81d4d" (UID: "cbf2db2c-002e-431b-9238-071ac2e81d4d"). InnerVolumeSpecName "kube-api-access-f7xxt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.393227 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "cbf2db2c-002e-431b-9238-071ac2e81d4d" (UID: "cbf2db2c-002e-431b-9238-071ac2e81d4d"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.474837 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cbf2db2c-002e-431b-9238-071ac2e81d4d" (UID: "cbf2db2c-002e-431b-9238-071ac2e81d4d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.476757 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-config" (OuterVolumeSpecName: "config") pod "cbf2db2c-002e-431b-9238-071ac2e81d4d" (UID: "cbf2db2c-002e-431b-9238-071ac2e81d4d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.492979 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-config\") on node \"crc\" DevicePath \"\""
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.493013 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.493023 4916 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-httpd-config\") on node \"crc\" DevicePath \"\""
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.493033 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7xxt\" (UniqueName: \"kubernetes.io/projected/cbf2db2c-002e-431b-9238-071ac2e81d4d-kube-api-access-f7xxt\") on node \"crc\" DevicePath \"\""
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.523849 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "cbf2db2c-002e-431b-9238-071ac2e81d4d" (UID: "cbf2db2c-002e-431b-9238-071ac2e81d4d"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.595929 4916 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cbf2db2c-002e-431b-9238-071ac2e81d4d-ovndb-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.794659 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6c6cb986d4-9jlqh" event={"ID":"cbf2db2c-002e-431b-9238-071ac2e81d4d","Type":"ContainerDied","Data":"059635875b6f80999bff6128bb8ddd6ed4c86828b42c5b990f68604d5ac362c6"}
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.794707 4916 scope.go:117] "RemoveContainer" containerID="422309a2a61c801173de90807be0619c19db943ae3e5888ffa1bf023b2730ad6"
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.794846 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6c6cb986d4-9jlqh"
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.826980 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6c6cb986d4-9jlqh"]
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.838928 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6c6cb986d4-9jlqh"]
Dec 03 19:50:17 crc kubenswrapper[4916]: I1203 19:50:17.843754 4916 scope.go:117] "RemoveContainer" containerID="72e45ef09758b2d244eb02d9ea27fedb602fc0ad50dd520de1d82f9df2d2d07e"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.095544 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.101537 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5d9557c554-w6j5d" podUID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:40718->10.217.0.160:9311: read: connection reset by peer"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.101870 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5d9557c554-w6j5d" podUID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.160:9311/healthcheck\": read tcp 10.217.0.2:40724->10.217.0.160:9311: read: connection reset by peer"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.482881 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5d9557c554-w6j5d"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.493667 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbf2db2c-002e-431b-9238-071ac2e81d4d" path="/var/lib/kubelet/pods/cbf2db2c-002e-431b-9238-071ac2e81d4d/volumes"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.619178 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0252d76f-bb1d-4341-a584-660f1ecd343a-logs\") pod \"0252d76f-bb1d-4341-a584-660f1ecd343a\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") "
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.619338 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-combined-ca-bundle\") pod \"0252d76f-bb1d-4341-a584-660f1ecd343a\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") "
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.619376 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzxc5\" (UniqueName: \"kubernetes.io/projected/0252d76f-bb1d-4341-a584-660f1ecd343a-kube-api-access-qzxc5\") pod \"0252d76f-bb1d-4341-a584-660f1ecd343a\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") "
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.619461 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data\") pod \"0252d76f-bb1d-4341-a584-660f1ecd343a\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") "
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.619500 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data-custom\") pod \"0252d76f-bb1d-4341-a584-660f1ecd343a\" (UID: \"0252d76f-bb1d-4341-a584-660f1ecd343a\") "
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.621759 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0252d76f-bb1d-4341-a584-660f1ecd343a-logs" (OuterVolumeSpecName: "logs") pod "0252d76f-bb1d-4341-a584-660f1ecd343a" (UID: "0252d76f-bb1d-4341-a584-660f1ecd343a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.628354 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0252d76f-bb1d-4341-a584-660f1ecd343a-kube-api-access-qzxc5" (OuterVolumeSpecName: "kube-api-access-qzxc5") pod "0252d76f-bb1d-4341-a584-660f1ecd343a" (UID: "0252d76f-bb1d-4341-a584-660f1ecd343a"). InnerVolumeSpecName "kube-api-access-qzxc5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.628666 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.628900 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="ceilometer-central-agent" containerID="cri-o://0137f0c39daa5d8f9102b706b2b8fc14b43af74d9196506c98d3b5a8c8074037" gracePeriod=30
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.629022 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="proxy-httpd" containerID="cri-o://fcb50210936835d5b058d51fee8e00f49e0cf13eaeb451f09be4424bcc279402" gracePeriod=30
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.629060 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="sg-core" containerID="cri-o://8e807111b2848e32feeea2d9cc9ea59650a097865daa18487499b00bd599346a" gracePeriod=30
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.629088 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="ceilometer-notification-agent" containerID="cri-o://f76de2275956e7c7963c6e17153618e3f5173f757c906f457b56d523e3ef0373" gracePeriod=30
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.632753 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0252d76f-bb1d-4341-a584-660f1ecd343a" (UID: "0252d76f-bb1d-4341-a584-660f1ecd343a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.639004 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.657302 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0252d76f-bb1d-4341-a584-660f1ecd343a" (UID: "0252d76f-bb1d-4341-a584-660f1ecd343a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.679031 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data" (OuterVolumeSpecName: "config-data") pod "0252d76f-bb1d-4341-a584-660f1ecd343a" (UID: "0252d76f-bb1d-4341-a584-660f1ecd343a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.721239 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzxc5\" (UniqueName: \"kubernetes.io/projected/0252d76f-bb1d-4341-a584-660f1ecd343a-kube-api-access-qzxc5\") on node \"crc\" DevicePath \"\""
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.721272 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.721283 4916 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.721293 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0252d76f-bb1d-4341-a584-660f1ecd343a-logs\") on node \"crc\" DevicePath \"\""
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.721301 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0252d76f-bb1d-4341-a584-660f1ecd343a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.805885 4916 generic.go:334] "Generic (PLEG): container finished" podID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerID="fcb50210936835d5b058d51fee8e00f49e0cf13eaeb451f09be4424bcc279402" exitCode=0
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.805914 4916 generic.go:334] "Generic (PLEG): container finished" podID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerID="8e807111b2848e32feeea2d9cc9ea59650a097865daa18487499b00bd599346a" exitCode=2
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.805945 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39cbb12-e84d-4f98-a410-86f3103ea424","Type":"ContainerDied","Data":"fcb50210936835d5b058d51fee8e00f49e0cf13eaeb451f09be4424bcc279402"}
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.805969 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39cbb12-e84d-4f98-a410-86f3103ea424","Type":"ContainerDied","Data":"8e807111b2848e32feeea2d9cc9ea59650a097865daa18487499b00bd599346a"}
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.812840 4916 generic.go:334] "Generic (PLEG): container finished" podID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerID="2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f" exitCode=0
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.812892 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d9557c554-w6j5d" event={"ID":"0252d76f-bb1d-4341-a584-660f1ecd343a","Type":"ContainerDied","Data":"2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f"}
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.812928 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5d9557c554-w6j5d"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.812966 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d9557c554-w6j5d" event={"ID":"0252d76f-bb1d-4341-a584-660f1ecd343a","Type":"ContainerDied","Data":"37af3923053ecfddba1cf4fc4f37a80c642d18678d28ec597783e58276926804"}
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.812991 4916 scope.go:117] "RemoveContainer" containerID="2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.845627 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5d9557c554-w6j5d"]
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.851316 4916 scope.go:117] "RemoveContainer" containerID="da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.851698 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5d9557c554-w6j5d"]
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.878589 4916 scope.go:117] "RemoveContainer" containerID="2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f"
Dec 03 19:50:18 crc kubenswrapper[4916]: E1203 19:50:18.879349 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f\": container with ID starting with 2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f not found: ID does not exist" containerID="2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.879392 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f"} err="failed to get container status \"2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f\": rpc error: code = NotFound desc = could not find container \"2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f\": container with ID starting with 2da92c4c5d274492b4bde5dcc39f0613a4081a038028c4cba7fafa558b14f81f not found: ID does not exist"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.879416 4916 scope.go:117] "RemoveContainer" containerID="da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72"
Dec 03 19:50:18 crc kubenswrapper[4916]: E1203 19:50:18.880050 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72\": container with ID starting with da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72 not found: ID does not exist" containerID="da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.880090 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72"} err="failed to get container status \"da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72\": rpc error: code = NotFound desc = could not find container \"da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72\": container with ID starting with da18a70573bde6684b1ee44e29b01082d0619acece0409fb5f55549287314a72 not found: ID does not exist"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.913819 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5fb66fd5df-mqd8w"]
Dec 03 19:50:18 crc kubenswrapper[4916]: E1203 19:50:18.914430 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerName="barbican-api"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.914446 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerName="barbican-api"
Dec 03 19:50:18 crc kubenswrapper[4916]: E1203 19:50:18.914458 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerName="barbican-api-log"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.914465 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerName="barbican-api-log"
Dec 03 19:50:18 crc kubenswrapper[4916]: E1203 19:50:18.914473 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbf2db2c-002e-431b-9238-071ac2e81d4d" containerName="neutron-httpd"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.914480 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbf2db2c-002e-431b-9238-071ac2e81d4d" containerName="neutron-httpd"
Dec 03 19:50:18 crc kubenswrapper[4916]: E1203 19:50:18.914501 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbf2db2c-002e-431b-9238-071ac2e81d4d" containerName="neutron-api"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.914508 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbf2db2c-002e-431b-9238-071ac2e81d4d" containerName="neutron-api"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.914765 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbf2db2c-002e-431b-9238-071ac2e81d4d" containerName="neutron-api"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.914784 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerName="barbican-api"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.914792 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbf2db2c-002e-431b-9238-071ac2e81d4d" containerName="neutron-httpd"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.914810 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="0252d76f-bb1d-4341-a584-660f1ecd343a" containerName="barbican-api-log"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.915739 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.921103 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.921410 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.921897 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Dec 03 19:50:18 crc kubenswrapper[4916]: I1203 19:50:18.968004 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5fb66fd5df-mqd8w"]
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.024815 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-etc-swift\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.024861 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-run-httpd\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.024881 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-log-httpd\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.024918 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-config-data\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.024939 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-public-tls-certs\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.024985 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-internal-tls-certs\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.025008 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-combined-ca-bundle\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.025035 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krxnb\" (UniqueName: \"kubernetes.io/projected/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-kube-api-access-krxnb\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.126655 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krxnb\" (UniqueName: \"kubernetes.io/projected/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-kube-api-access-krxnb\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.126742 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-etc-swift\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.126771 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-run-httpd\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.126786 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-log-httpd\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.126824 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-config-data\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.126847 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-public-tls-certs\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.126889 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-internal-tls-certs\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.126912 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-combined-ca-bundle\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.127553 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-log-httpd\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.127791 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-run-httpd\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.130644 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-combined-ca-bundle\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.135995 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-config-data\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.142672 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-internal-tls-certs\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.144407 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krxnb\" (UniqueName: \"kubernetes.io/projected/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-kube-api-access-krxnb\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.147755 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-etc-swift\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.158748 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c9b70bb-7121-4484-9d1c-f928d26b6f3a-public-tls-certs\") pod \"swift-proxy-5fb66fd5df-mqd8w\" (UID: \"7c9b70bb-7121-4484-9d1c-f928d26b6f3a\") " pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.264327 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.834108 4916 generic.go:334] "Generic (PLEG): container finished" podID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerID="0137f0c39daa5d8f9102b706b2b8fc14b43af74d9196506c98d3b5a8c8074037" exitCode=0
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.834183 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39cbb12-e84d-4f98-a410-86f3103ea424","Type":"ContainerDied","Data":"0137f0c39daa5d8f9102b706b2b8fc14b43af74d9196506c98d3b5a8c8074037"}
Dec 03 19:50:19 crc kubenswrapper[4916]: W1203 19:50:19.856919 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c9b70bb_7121_4484_9d1c_f928d26b6f3a.slice/crio-8536f591757185b7c40299fa258da733ce7060dfc914c55d78788611b9959926 WatchSource:0}: Error finding container 8536f591757185b7c40299fa258da733ce7060dfc914c55d78788611b9959926: Status 404 returned error can't find the container with id 8536f591757185b7c40299fa258da733ce7060dfc914c55d78788611b9959926
Dec 03 19:50:19 crc kubenswrapper[4916]: I1203 19:50:19.870428 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5fb66fd5df-mqd8w"]
Dec 03 19:50:20 crc kubenswrapper[4916]: I1203 19:50:20.495215 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0252d76f-bb1d-4341-a584-660f1ecd343a" path="/var/lib/kubelet/pods/0252d76f-bb1d-4341-a584-660f1ecd343a/volumes"
Dec 03 19:50:20 crc kubenswrapper[4916]: I1203 19:50:20.844474 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5fb66fd5df-mqd8w" event={"ID":"7c9b70bb-7121-4484-9d1c-f928d26b6f3a","Type":"ContainerStarted","Data":"af02a1036358814ad89249e57a9084c74c419648295886ffe9b57879502ecf76"}
Dec 03 19:50:20 crc kubenswrapper[4916]: I1203 19:50:20.844517 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5fb66fd5df-mqd8w" event={"ID":"7c9b70bb-7121-4484-9d1c-f928d26b6f3a","Type":"ContainerStarted","Data":"628af9559228361bfd18e86076ff3c8e6db0760ca14f31b18fc8ae3cddd6755f"}
Dec 03 19:50:20 crc kubenswrapper[4916]: I1203 19:50:20.844528 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5fb66fd5df-mqd8w" event={"ID":"7c9b70bb-7121-4484-9d1c-f928d26b6f3a","Type":"ContainerStarted","Data":"8536f591757185b7c40299fa258da733ce7060dfc914c55d78788611b9959926"}
Dec 03 19:50:20 crc kubenswrapper[4916]: I1203 19:50:20.845632 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:20 crc kubenswrapper[4916]: I1203 19:50:20.845656 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5fb66fd5df-mqd8w"
Dec 03 19:50:20 crc kubenswrapper[4916]: I1203 19:50:20.874734 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5fb66fd5df-mqd8w" podStartSLOduration=2.874706582 podStartE2EDuration="2.874706582s" podCreationTimestamp="2025-12-03 19:50:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:20.868185857 +0000 UTC m=+1236.830996123" watchObservedRunningTime="2025-12-03 19:50:20.874706582 +0000 UTC m=+1236.837516848"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.105676 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-6dd8857784-86hhw"]
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.108497 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.113041 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-29w6f"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.114106 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.114378 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.174611 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6dd8857784-86hhw"]
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.190434 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data-custom\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.190614 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-combined-ca-bundle\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.190646 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.190663 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qcrt\" (UniqueName: \"kubernetes.io/projected/3743884c-79ea-47d6-ad97-92d235fd5a98-kube-api-access-8qcrt\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.228963 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7756b9d78c-6kh76"]
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.230647 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.256911 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7756b9d78c-6kh76"]
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.293661 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-config\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.293698 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-nb\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.293733 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-combined-ca-bundle\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.293758 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.293774 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qcrt\" (UniqueName: \"kubernetes.io/projected/3743884c-79ea-47d6-ad97-92d235fd5a98-kube-api-access-8qcrt\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.293805 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-sb\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.293843 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data-custom\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.293866 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-svc\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.293884 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-swift-storage-0\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.293907 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsbqw\" (UniqueName: \"kubernetes.io/projected/337f63c9-1130-480a-9fa4-8b869540333d-kube-api-access-hsbqw\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.304850 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data-custom\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.310030 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-combined-ca-bundle\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.313264 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.329701 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-86f5cb85df-p52sw"]
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.331123 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-86f5cb85df-p52sw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.345806 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.365264 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qcrt\" (UniqueName: \"kubernetes.io/projected/3743884c-79ea-47d6-ad97-92d235fd5a98-kube-api-access-8qcrt\") pod \"heat-engine-6dd8857784-86hhw\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " pod="openstack/heat-engine-6dd8857784-86hhw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.373813 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-556b54b4c6-vrtbp"]
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.375080 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-556b54b4c6-vrtbp"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.384979 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.395662 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-config\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.397632 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-config\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.399981 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-nb\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.400140 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-config-data-custom\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.400220 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6wz4\" (UniqueName: \"kubernetes.io/projected/5828ebe5-f058-464d-bb8a-1846217aa15d-kube-api-access-j6wz4\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.400335 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-sb\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.400405 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb869\" (UniqueName: \"kubernetes.io/projected/38cae7c4-cc3b-41b0-9552-e85743db98ab-kube-api-access-kb869\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.401702 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw"
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.401786 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-combined-ca-bundle\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.401812 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-combined-ca-bundle\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.401851 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-config-data\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.401882 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-svc\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.401897 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-sb\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.401908 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data-custom\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.402000 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-swift-storage-0\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.402080 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsbqw\" (UniqueName: \"kubernetes.io/projected/337f63c9-1130-480a-9fa4-8b869540333d-kube-api-access-hsbqw\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.402550 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-svc\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.402649 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-86f5cb85df-p52sw"] 
Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.403051 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-swift-storage-0\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.406367 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-nb\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.429449 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsbqw\" (UniqueName: \"kubernetes.io/projected/337f63c9-1130-480a-9fa4-8b869540333d-kube-api-access-hsbqw\") pod \"dnsmasq-dns-7756b9d78c-6kh76\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.443618 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-556b54b4c6-vrtbp"] Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.445847 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-6dd8857784-86hhw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.503203 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-config-data-custom\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.503241 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6wz4\" (UniqueName: \"kubernetes.io/projected/5828ebe5-f058-464d-bb8a-1846217aa15d-kube-api-access-j6wz4\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.503269 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb869\" (UniqueName: \"kubernetes.io/projected/38cae7c4-cc3b-41b0-9552-e85743db98ab-kube-api-access-kb869\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.503295 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.503317 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-combined-ca-bundle\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.503334 4916 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-combined-ca-bundle\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.503360 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-config-data\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.503379 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data-custom\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.512365 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.548214 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb869\" (UniqueName: \"kubernetes.io/projected/38cae7c4-cc3b-41b0-9552-e85743db98ab-kube-api-access-kb869\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.560416 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.561816 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-config-data-custom\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.563592 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6wz4\" (UniqueName: \"kubernetes.io/projected/5828ebe5-f058-464d-bb8a-1846217aa15d-kube-api-access-j6wz4\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.563735 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-combined-ca-bundle\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.569744 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data-custom\") pod \"heat-cfnapi-86f5cb85df-p52sw\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.569963 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-config-data\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.570551 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-combined-ca-bundle\") pod \"heat-api-556b54b4c6-vrtbp\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") " pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.810519 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.865200 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.868970 4916 generic.go:334] "Generic (PLEG): container finished" podID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerID="f76de2275956e7c7963c6e17153618e3f5173f757c906f457b56d523e3ef0373" exitCode=0 Dec 03 19:50:22 crc kubenswrapper[4916]: I1203 19:50:22.869018 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39cbb12-e84d-4f98-a410-86f3103ea424","Type":"ContainerDied","Data":"f76de2275956e7c7963c6e17153618e3f5173f757c906f457b56d523e3ef0373"} Dec 03 19:50:23 crc kubenswrapper[4916]: I1203 19:50:23.304807 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 19:50:24 crc kubenswrapper[4916]: I1203 19:50:24.007669 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.156:3000/\": dial tcp 10.217.0.156:3000: connect: connection refused" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.349477 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.392449 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-config-data\") pod \"f39cbb12-e84d-4f98-a410-86f3103ea424\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.392522 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-log-httpd\") pod \"f39cbb12-e84d-4f98-a410-86f3103ea424\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.392578 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-scripts\") pod \"f39cbb12-e84d-4f98-a410-86f3103ea424\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.392630 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-run-httpd\") pod \"f39cbb12-e84d-4f98-a410-86f3103ea424\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.392655 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wttlq\" (UniqueName: \"kubernetes.io/projected/f39cbb12-e84d-4f98-a410-86f3103ea424-kube-api-access-wttlq\") pod \"f39cbb12-e84d-4f98-a410-86f3103ea424\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.392705 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-combined-ca-bundle\") pod \"f39cbb12-e84d-4f98-a410-86f3103ea424\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.392797 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-sg-core-conf-yaml\") pod \"f39cbb12-e84d-4f98-a410-86f3103ea424\" (UID: \"f39cbb12-e84d-4f98-a410-86f3103ea424\") " Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.394631 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f39cbb12-e84d-4f98-a410-86f3103ea424" (UID: "f39cbb12-e84d-4f98-a410-86f3103ea424"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.394861 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f39cbb12-e84d-4f98-a410-86f3103ea424" (UID: "f39cbb12-e84d-4f98-a410-86f3103ea424"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.401163 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-scripts" (OuterVolumeSpecName: "scripts") pod "f39cbb12-e84d-4f98-a410-86f3103ea424" (UID: "f39cbb12-e84d-4f98-a410-86f3103ea424"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.401264 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f39cbb12-e84d-4f98-a410-86f3103ea424-kube-api-access-wttlq" (OuterVolumeSpecName: "kube-api-access-wttlq") pod "f39cbb12-e84d-4f98-a410-86f3103ea424" (UID: "f39cbb12-e84d-4f98-a410-86f3103ea424"). InnerVolumeSpecName "kube-api-access-wttlq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.422816 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f39cbb12-e84d-4f98-a410-86f3103ea424" (UID: "f39cbb12-e84d-4f98-a410-86f3103ea424"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.481385 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f39cbb12-e84d-4f98-a410-86f3103ea424" (UID: "f39cbb12-e84d-4f98-a410-86f3103ea424"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.496715 4916 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.498100 4916 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.498118 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.498127 4916 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f39cbb12-e84d-4f98-a410-86f3103ea424-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.498136 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wttlq\" (UniqueName: \"kubernetes.io/projected/f39cbb12-e84d-4f98-a410-86f3103ea424-kube-api-access-wttlq\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.498146 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.516701 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-config-data" (OuterVolumeSpecName: "config-data") pod "f39cbb12-e84d-4f98-a410-86f3103ea424" (UID: "f39cbb12-e84d-4f98-a410-86f3103ea424"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.599862 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f39cbb12-e84d-4f98-a410-86f3103ea424-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.634739 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7756b9d78c-6kh76"] Dec 03 19:50:27 crc kubenswrapper[4916]: W1203 19:50:27.642841 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod337f63c9_1130_480a_9fa4_8b869540333d.slice/crio-c5d8e2be6eaf9219995da1e48f15f2850c3105cfd8848bd5b67f16223dd97c0c WatchSource:0}: Error finding container c5d8e2be6eaf9219995da1e48f15f2850c3105cfd8848bd5b67f16223dd97c0c: Status 404 returned error can't find the container with id c5d8e2be6eaf9219995da1e48f15f2850c3105cfd8848bd5b67f16223dd97c0c Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.736437 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-556b54b4c6-vrtbp"] Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.751923 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-6dd8857784-86hhw"] Dec 03 19:50:27 crc kubenswrapper[4916]: W1203 19:50:27.755537 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38cae7c4_cc3b_41b0_9552_e85743db98ab.slice/crio-4ad08ed0ee9162fe4fa441de7c96c22d35eb276dc9564c29d77b645774943bef WatchSource:0}: Error finding container 4ad08ed0ee9162fe4fa441de7c96c22d35eb276dc9564c29d77b645774943bef: Status 404 returned error can't find the container with id 4ad08ed0ee9162fe4fa441de7c96c22d35eb276dc9564c29d77b645774943bef Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.761593 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-86f5cb85df-p52sw"] Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.919828 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-556b54b4c6-vrtbp" event={"ID":"5828ebe5-f058-464d-bb8a-1846217aa15d","Type":"ContainerStarted","Data":"74d85348961b4d026881125ef2f49896793a72e6ca7a41f66cb2b0b817eb7b3b"} Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.923679 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f39cbb12-e84d-4f98-a410-86f3103ea424","Type":"ContainerDied","Data":"6dd77328be1b555b7015b56065f860f7441b94b8f4e34b27f03f7159c6e0d531"} Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.923706 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.923739 4916 scope.go:117] "RemoveContainer" containerID="fcb50210936835d5b058d51fee8e00f49e0cf13eaeb451f09be4424bcc279402" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.925795 4916 generic.go:334] "Generic (PLEG): container finished" podID="337f63c9-1130-480a-9fa4-8b869540333d" containerID="7469afa4f06848d0d47aa3e5a358a3c0edb2354d0ce188905924d99687989121" exitCode=0 Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.925838 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" event={"ID":"337f63c9-1130-480a-9fa4-8b869540333d","Type":"ContainerDied","Data":"7469afa4f06848d0d47aa3e5a358a3c0edb2354d0ce188905924d99687989121"} Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.925889 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" event={"ID":"337f63c9-1130-480a-9fa4-8b869540333d","Type":"ContainerStarted","Data":"c5d8e2be6eaf9219995da1e48f15f2850c3105cfd8848bd5b67f16223dd97c0c"} Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.929865 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-86f5cb85df-p52sw" event={"ID":"38cae7c4-cc3b-41b0-9552-e85743db98ab","Type":"ContainerStarted","Data":"4ad08ed0ee9162fe4fa441de7c96c22d35eb276dc9564c29d77b645774943bef"} Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.939117 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6dd8857784-86hhw" event={"ID":"3743884c-79ea-47d6-ad97-92d235fd5a98","Type":"ContainerStarted","Data":"2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8"} Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.939403 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6dd8857784-86hhw" event={"ID":"3743884c-79ea-47d6-ad97-92d235fd5a98","Type":"ContainerStarted","Data":"a0c5228f04981a6d2fb841b0b325fc01b8729828d87c948c6a28a800530aa858"} Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.940098 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-6dd8857784-86hhw" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.941675 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6b5a6645-8305-4075-b2c2-a243645d7bf3","Type":"ContainerStarted","Data":"e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f"} Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.979763 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-6dd8857784-86hhw" podStartSLOduration=5.979742478 podStartE2EDuration="5.979742478s" podCreationTimestamp="2025-12-03 19:50:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:27.969039362 +0000 UTC m=+1243.931849628" watchObservedRunningTime="2025-12-03 19:50:27.979742478 +0000 UTC m=+1243.942552744" Dec 03 19:50:27 crc kubenswrapper[4916]: I1203 19:50:27.988749 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.642552104 podStartE2EDuration="14.988728969s" podCreationTimestamp="2025-12-03 19:50:13 +0000 UTC" firstStartedPulling="2025-12-03 19:50:14.688096707 +0000 UTC m=+1230.650906973" lastFinishedPulling="2025-12-03 19:50:27.034273572 +0000 UTC 
m=+1242.997083838" observedRunningTime="2025-12-03 19:50:27.98578466 +0000 UTC m=+1243.948594926" watchObservedRunningTime="2025-12-03 19:50:27.988728969 +0000 UTC m=+1243.951539235" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.117380 4916 scope.go:117] "RemoveContainer" containerID="8e807111b2848e32feeea2d9cc9ea59650a097865daa18487499b00bd599346a" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.144143 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.149445 4916 scope.go:117] "RemoveContainer" containerID="f76de2275956e7c7963c6e17153618e3f5173f757c906f457b56d523e3ef0373" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.159671 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.172589 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:50:28 crc kubenswrapper[4916]: E1203 19:50:28.173119 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="proxy-httpd" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.173180 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="proxy-httpd" Dec 03 19:50:28 crc kubenswrapper[4916]: E1203 19:50:28.173272 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="sg-core" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.178786 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="sg-core" Dec 03 19:50:28 crc kubenswrapper[4916]: E1203 19:50:28.178911 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="ceilometer-notification-agent" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.178972 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="ceilometer-notification-agent" Dec 03 19:50:28 crc kubenswrapper[4916]: E1203 19:50:28.179025 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="ceilometer-central-agent" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.179073 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="ceilometer-central-agent" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.179417 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="ceilometer-notification-agent" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.179489 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="ceilometer-central-agent" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.179548 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="sg-core" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.179637 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" containerName="proxy-httpd" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.181222 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.183749 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.184274 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.196322 4916 scope.go:117] "RemoveContainer" containerID="0137f0c39daa5d8f9102b706b2b8fc14b43af74d9196506c98d3b5a8c8074037" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.202388 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.209850 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.209894 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-scripts\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.209922 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-config-data\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.209948 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.210036 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-log-httpd\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.210054 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-run-httpd\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.210071 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqv87\" (UniqueName: \"kubernetes.io/projected/8ef40a4d-a930-428c-a816-ad1afa6d6c04-kube-api-access-tqv87\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.312802 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-log-httpd\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.312861 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-run-httpd\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.312892 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqv87\" (UniqueName: \"kubernetes.io/projected/8ef40a4d-a930-428c-a816-ad1afa6d6c04-kube-api-access-tqv87\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.312943 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.312973 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-scripts\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.313002 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-config-data\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.313033 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.314115 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-log-httpd\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.314175 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-run-httpd\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.317510 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.320187 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.321760 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-config-data\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.322547 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-scripts\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.336047 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqv87\" (UniqueName: \"kubernetes.io/projected/8ef40a4d-a930-428c-a816-ad1afa6d6c04-kube-api-access-tqv87\") pod \"ceilometer-0\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.494180 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f39cbb12-e84d-4f98-a410-86f3103ea424" path="/var/lib/kubelet/pods/f39cbb12-e84d-4f98-a410-86f3103ea424/volumes" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.499290 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.735740 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-7b767dc896-5v8nl"] Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.757066 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.792852 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-c989dd47c-7njt7"] Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.798060 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.811713 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-7b767dc896-5v8nl"] Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.819803 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-644d4d84cb-x7l99"] Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.821469 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.822293 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-config-data\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.822339 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrt8m\" (UniqueName: \"kubernetes.io/projected/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-kube-api-access-wrt8m\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.822400 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-combined-ca-bundle\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.822417 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-config-data-custom\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.822445 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data-custom\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.822511 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-combined-ca-bundle\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.822529 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4k7w\" (UniqueName: \"kubernetes.io/projected/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-kube-api-access-b4k7w\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.822547 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.834444 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-c989dd47c-7njt7"] Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.856412 
4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-644d4d84cb-x7l99"] Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.924698 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-combined-ca-bundle\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.924737 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-config-data-custom\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.924760 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-combined-ca-bundle\") pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.924795 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data-custom\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.924870 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-combined-ca-bundle\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.924890 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4k7w\" (UniqueName: \"kubernetes.io/projected/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-kube-api-access-b4k7w\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.924906 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.924927 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data-custom\") pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.924952 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-478gs\" (UniqueName: \"kubernetes.io/projected/b79a775e-2204-42bd-9679-e95e4843b91f-kube-api-access-478gs\") 
pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.924970 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-config-data\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.924994 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrt8m\" (UniqueName: \"kubernetes.io/projected/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-kube-api-access-wrt8m\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.925030 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data\") pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.933434 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-combined-ca-bundle\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.939659 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.942183 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-config-data\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.947510 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-combined-ca-bundle\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.948311 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrt8m\" (UniqueName: \"kubernetes.io/projected/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-kube-api-access-wrt8m\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.957740 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4k7w\" (UniqueName: \"kubernetes.io/projected/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-kube-api-access-b4k7w\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " 
pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.965218 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7ee41e1-65cb-4642-ae18-5f0a926d8c1d-config-data-custom\") pod \"heat-engine-7b767dc896-5v8nl\" (UID: \"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d\") " pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.965526 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data-custom\") pod \"heat-api-c989dd47c-7njt7\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.978669 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" event={"ID":"337f63c9-1130-480a-9fa4-8b869540333d","Type":"ContainerStarted","Data":"16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113"} Dec 03 19:50:28 crc kubenswrapper[4916]: I1203 19:50:28.979799 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.026840 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-combined-ca-bundle\") pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.027026 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data-custom\") pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.027056 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-478gs\" (UniqueName: \"kubernetes.io/projected/b79a775e-2204-42bd-9679-e95e4843b91f-kube-api-access-478gs\") pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.027118 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data\") pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.031494 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" podStartSLOduration=7.031479155 podStartE2EDuration="7.031479155s" podCreationTimestamp="2025-12-03 19:50:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:29.006724143 +0000 UTC m=+1244.969534409" watchObservedRunningTime="2025-12-03 19:50:29.031479155 +0000 UTC m=+1244.994289421" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.034156 4916 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data-custom\") pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.035164 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data\") pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.038450 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-combined-ca-bundle\") pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.055206 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-5sdxp"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.056480 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5sdxp" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.063729 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-5sdxp"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.066191 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-478gs\" (UniqueName: \"kubernetes.io/projected/b79a775e-2204-42bd-9679-e95e4843b91f-kube-api-access-478gs\") pod \"heat-cfnapi-644d4d84cb-x7l99\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.087665 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.097624 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.128668 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-operator-scripts\") pod \"nova-api-db-create-5sdxp\" (UID: \"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e\") " pod="openstack/nova-api-db-create-5sdxp" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.128963 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82lrn\" (UniqueName: \"kubernetes.io/projected/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-kube-api-access-82lrn\") pod \"nova-api-db-create-5sdxp\" (UID: \"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e\") " pod="openstack/nova-api-db-create-5sdxp" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.129258 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.141280 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.141982 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-mjjt5"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.144466 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mjjt5" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.203607 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-mjjt5"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.231267 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-operator-scripts\") pod \"nova-api-db-create-5sdxp\" (UID: \"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e\") " pod="openstack/nova-api-db-create-5sdxp" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.231397 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82lrn\" (UniqueName: \"kubernetes.io/projected/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-kube-api-access-82lrn\") pod \"nova-api-db-create-5sdxp\" (UID: \"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e\") " pod="openstack/nova-api-db-create-5sdxp" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.232498 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-operator-scripts\") pod \"nova-api-db-create-5sdxp\" (UID: \"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e\") " pod="openstack/nova-api-db-create-5sdxp" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.244760 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-4786-account-create-update-fl47z"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.246002 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-4786-account-create-update-fl47z" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.249784 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82lrn\" (UniqueName: \"kubernetes.io/projected/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-kube-api-access-82lrn\") pod \"nova-api-db-create-5sdxp\" (UID: \"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e\") " pod="openstack/nova-api-db-create-5sdxp" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.250537 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.261615 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4786-account-create-update-fl47z"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.275832 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5fb66fd5df-mqd8w" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.296053 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5fb66fd5df-mqd8w" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.332697 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-operator-scripts\") pod \"nova-cell0-db-create-mjjt5\" (UID: \"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd\") " pod="openstack/nova-cell0-db-create-mjjt5" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.332843 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klwfm\" (UniqueName: \"kubernetes.io/projected/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-kube-api-access-klwfm\") pod \"nova-cell0-db-create-mjjt5\" (UID: \"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd\") " pod="openstack/nova-cell0-db-create-mjjt5" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.367518 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-8bs7m"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.368640 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8bs7m" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.393386 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-8bs7m"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.427633 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-e24b-account-create-update-fcl4t"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.429016 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.431345 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.435124 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klwfm\" (UniqueName: \"kubernetes.io/projected/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-kube-api-access-klwfm\") pod \"nova-cell0-db-create-mjjt5\" (UID: \"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd\") " pod="openstack/nova-cell0-db-create-mjjt5" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.435215 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvvq8\" (UniqueName: \"kubernetes.io/projected/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-kube-api-access-dvvq8\") pod \"nova-api-4786-account-create-update-fl47z\" (UID: \"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a\") " pod="openstack/nova-api-4786-account-create-update-fl47z" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.435238 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-operator-scripts\") pod \"nova-cell0-db-create-mjjt5\" (UID: \"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd\") " pod="openstack/nova-cell0-db-create-mjjt5" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.435257 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-operator-scripts\") pod \"nova-api-4786-account-create-update-fl47z\" (UID: \"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a\") " pod="openstack/nova-api-4786-account-create-update-fl47z" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.437083 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-operator-scripts\") pod \"nova-cell0-db-create-mjjt5\" (UID: \"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd\") " pod="openstack/nova-cell0-db-create-mjjt5" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.453833 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e24b-account-create-update-fcl4t"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.460860 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klwfm\" (UniqueName: \"kubernetes.io/projected/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-kube-api-access-klwfm\") pod \"nova-cell0-db-create-mjjt5\" (UID: \"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd\") " pod="openstack/nova-cell0-db-create-mjjt5" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.476114 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5sdxp" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.504612 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-mjjt5" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.561931 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvvq8\" (UniqueName: \"kubernetes.io/projected/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-kube-api-access-dvvq8\") pod \"nova-api-4786-account-create-update-fl47z\" (UID: \"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a\") " pod="openstack/nova-api-4786-account-create-update-fl47z" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.561992 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-operator-scripts\") pod \"nova-api-4786-account-create-update-fl47z\" (UID: \"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a\") " pod="openstack/nova-api-4786-account-create-update-fl47z" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.562195 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bthlm\" (UniqueName: \"kubernetes.io/projected/523ba5d4-842e-4726-8b66-813508d7a9d2-kube-api-access-bthlm\") pod \"nova-cell1-db-create-8bs7m\" (UID: \"523ba5d4-842e-4726-8b66-813508d7a9d2\") " pod="openstack/nova-cell1-db-create-8bs7m" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.562284 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tlgl\" (UniqueName: \"kubernetes.io/projected/97f0e085-66c0-48ab-b023-6a4d50e08683-kube-api-access-6tlgl\") pod \"nova-cell0-e24b-account-create-update-fcl4t\" (UID: \"97f0e085-66c0-48ab-b023-6a4d50e08683\") " pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.562464 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/523ba5d4-842e-4726-8b66-813508d7a9d2-operator-scripts\") pod \"nova-cell1-db-create-8bs7m\" (UID: \"523ba5d4-842e-4726-8b66-813508d7a9d2\") " pod="openstack/nova-cell1-db-create-8bs7m" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.562522 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97f0e085-66c0-48ab-b023-6a4d50e08683-operator-scripts\") pod \"nova-cell0-e24b-account-create-update-fcl4t\" (UID: \"97f0e085-66c0-48ab-b023-6a4d50e08683\") " pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.563793 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-operator-scripts\") pod \"nova-api-4786-account-create-update-fl47z\" (UID: \"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a\") " pod="openstack/nova-api-4786-account-create-update-fl47z" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.569461 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-6491-account-create-update-rn5pw"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.570963 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-6491-account-create-update-rn5pw" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.575167 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.584423 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6491-account-create-update-rn5pw"] Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.596302 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvvq8\" (UniqueName: \"kubernetes.io/projected/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-kube-api-access-dvvq8\") pod \"nova-api-4786-account-create-update-fl47z\" (UID: \"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a\") " pod="openstack/nova-api-4786-account-create-update-fl47z" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.665612 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bthlm\" (UniqueName: \"kubernetes.io/projected/523ba5d4-842e-4726-8b66-813508d7a9d2-kube-api-access-bthlm\") pod \"nova-cell1-db-create-8bs7m\" (UID: \"523ba5d4-842e-4726-8b66-813508d7a9d2\") " pod="openstack/nova-cell1-db-create-8bs7m" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.665679 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tlgl\" (UniqueName: \"kubernetes.io/projected/97f0e085-66c0-48ab-b023-6a4d50e08683-kube-api-access-6tlgl\") pod \"nova-cell0-e24b-account-create-update-fcl4t\" (UID: \"97f0e085-66c0-48ab-b023-6a4d50e08683\") " pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.665747 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/482fafd3-d1fd-4235-888d-aa645bdaa1e3-operator-scripts\") pod \"nova-cell1-6491-account-create-update-rn5pw\" (UID: \"482fafd3-d1fd-4235-888d-aa645bdaa1e3\") " pod="openstack/nova-cell1-6491-account-create-update-rn5pw" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.665804 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/523ba5d4-842e-4726-8b66-813508d7a9d2-operator-scripts\") pod \"nova-cell1-db-create-8bs7m\" (UID: \"523ba5d4-842e-4726-8b66-813508d7a9d2\") " pod="openstack/nova-cell1-db-create-8bs7m" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.665840 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97f0e085-66c0-48ab-b023-6a4d50e08683-operator-scripts\") pod \"nova-cell0-e24b-account-create-update-fcl4t\" (UID: \"97f0e085-66c0-48ab-b023-6a4d50e08683\") " pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.665873 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv57k\" (UniqueName: \"kubernetes.io/projected/482fafd3-d1fd-4235-888d-aa645bdaa1e3-kube-api-access-dv57k\") pod \"nova-cell1-6491-account-create-update-rn5pw\" (UID: \"482fafd3-d1fd-4235-888d-aa645bdaa1e3\") " pod="openstack/nova-cell1-6491-account-create-update-rn5pw" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.667184 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/523ba5d4-842e-4726-8b66-813508d7a9d2-operator-scripts\") pod \"nova-cell1-db-create-8bs7m\" (UID: \"523ba5d4-842e-4726-8b66-813508d7a9d2\") " pod="openstack/nova-cell1-db-create-8bs7m" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.667864 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97f0e085-66c0-48ab-b023-6a4d50e08683-operator-scripts\") pod \"nova-cell0-e24b-account-create-update-fcl4t\" (UID: \"97f0e085-66c0-48ab-b023-6a4d50e08683\") " pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.684806 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tlgl\" (UniqueName: \"kubernetes.io/projected/97f0e085-66c0-48ab-b023-6a4d50e08683-kube-api-access-6tlgl\") pod \"nova-cell0-e24b-account-create-update-fcl4t\" (UID: \"97f0e085-66c0-48ab-b023-6a4d50e08683\") " pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.687114 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bthlm\" (UniqueName: \"kubernetes.io/projected/523ba5d4-842e-4726-8b66-813508d7a9d2-kube-api-access-bthlm\") pod \"nova-cell1-db-create-8bs7m\" (UID: \"523ba5d4-842e-4726-8b66-813508d7a9d2\") " pod="openstack/nova-cell1-db-create-8bs7m" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.695806 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8bs7m" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.766508 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv57k\" (UniqueName: \"kubernetes.io/projected/482fafd3-d1fd-4235-888d-aa645bdaa1e3-kube-api-access-dv57k\") pod \"nova-cell1-6491-account-create-update-rn5pw\" (UID: \"482fafd3-d1fd-4235-888d-aa645bdaa1e3\") " pod="openstack/nova-cell1-6491-account-create-update-rn5pw" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.766657 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/482fafd3-d1fd-4235-888d-aa645bdaa1e3-operator-scripts\") pod \"nova-cell1-6491-account-create-update-rn5pw\" (UID: \"482fafd3-d1fd-4235-888d-aa645bdaa1e3\") " pod="openstack/nova-cell1-6491-account-create-update-rn5pw" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.767471 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/482fafd3-d1fd-4235-888d-aa645bdaa1e3-operator-scripts\") pod \"nova-cell1-6491-account-create-update-rn5pw\" (UID: \"482fafd3-d1fd-4235-888d-aa645bdaa1e3\") " pod="openstack/nova-cell1-6491-account-create-update-rn5pw" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.778069 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.794157 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv57k\" (UniqueName: \"kubernetes.io/projected/482fafd3-d1fd-4235-888d-aa645bdaa1e3-kube-api-access-dv57k\") pod \"nova-cell1-6491-account-create-update-rn5pw\" (UID: \"482fafd3-d1fd-4235-888d-aa645bdaa1e3\") " pod="openstack/nova-cell1-6491-account-create-update-rn5pw" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.864921 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4786-account-create-update-fl47z" Dec 03 19:50:29 crc kubenswrapper[4916]: I1203 19:50:29.932301 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6491-account-create-update-rn5pw" Dec 03 19:50:30 crc kubenswrapper[4916]: I1203 19:50:30.020759 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ef40a4d-a930-428c-a816-ad1afa6d6c04","Type":"ContainerStarted","Data":"3549d6fdade644f51202dbee95f18ed0766c322b7c0bffde5e170d8d7a7710b4"} Dec 03 19:50:30 crc kubenswrapper[4916]: I1203 19:50:30.053046 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-644d4d84cb-x7l99"] Dec 03 19:50:30 crc kubenswrapper[4916]: I1203 19:50:30.074838 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-7b767dc896-5v8nl"] Dec 03 19:50:30 crc kubenswrapper[4916]: I1203 19:50:30.096341 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-c989dd47c-7njt7"] Dec 03 19:50:30 crc kubenswrapper[4916]: I1203 19:50:30.225714 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-mjjt5"] Dec 03 19:50:30 crc kubenswrapper[4916]: I1203 19:50:30.254010 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-5sdxp"] Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.269341 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-556b54b4c6-vrtbp"] Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.292511 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-86f5cb85df-p52sw"] Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.316451 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-56c49bcc9c-497gn"] Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.317910 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.324451 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-public-svc" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.335227 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-api-internal-svc" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.343408 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-config-data-custom\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.343452 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-internal-tls-certs\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.343533 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcgk4\" (UniqueName: \"kubernetes.io/projected/af5156d3-f2f0-4963-8561-5eac0b719c9a-kube-api-access-pcgk4\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.343562 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-public-tls-certs\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.343603 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-combined-ca-bundle\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.343634 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-config-data\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.370449 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-84fcbd5864-k72dj"] Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.371739 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.374981 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-internal-svc" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.375222 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-heat-cfnapi-public-svc" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.416830 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-56c49bcc9c-497gn"] Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.429710 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-84fcbd5864-k72dj"] Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.444072 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-config-data\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.444115 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-combined-ca-bundle\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.444141 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-config-data-custom\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.444158 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-config-data\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.444188 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-internal-tls-certs\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.444222 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-public-tls-certs\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.444242 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzz2v\" (UniqueName: \"kubernetes.io/projected/480f4ab0-3854-480f-9dd8-d44be1454e48-kube-api-access-hzz2v\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc 
kubenswrapper[4916]: I1203 19:50:31.444260 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-internal-tls-certs\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.444283 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-config-data-custom\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.444331 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcgk4\" (UniqueName: \"kubernetes.io/projected/af5156d3-f2f0-4963-8561-5eac0b719c9a-kube-api-access-pcgk4\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.444362 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-public-tls-certs\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.444389 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-combined-ca-bundle\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.452594 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-combined-ca-bundle\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.453916 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-config-data\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.456230 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-public-tls-certs\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.471756 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-config-data-custom\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.480637 4916 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcgk4\" (UniqueName: \"kubernetes.io/projected/af5156d3-f2f0-4963-8561-5eac0b719c9a-kube-api-access-pcgk4\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.484251 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/af5156d3-f2f0-4963-8561-5eac0b719c9a-internal-tls-certs\") pod \"heat-api-56c49bcc9c-497gn\" (UID: \"af5156d3-f2f0-4963-8561-5eac0b719c9a\") " pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.546784 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-combined-ca-bundle\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.546833 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-config-data\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.546886 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-public-tls-certs\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.546905 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzz2v\" (UniqueName: \"kubernetes.io/projected/480f4ab0-3854-480f-9dd8-d44be1454e48-kube-api-access-hzz2v\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.546925 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-internal-tls-certs\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.546954 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-config-data-custom\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.557319 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-public-tls-certs\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.557330 4916 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-internal-tls-certs\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.557631 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-combined-ca-bundle\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.560911 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-config-data\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.570486 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/480f4ab0-3854-480f-9dd8-d44be1454e48-config-data-custom\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.577387 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzz2v\" (UniqueName: \"kubernetes.io/projected/480f4ab0-3854-480f-9dd8-d44be1454e48-kube-api-access-hzz2v\") pod \"heat-cfnapi-84fcbd5864-k72dj\" (UID: \"480f4ab0-3854-480f-9dd8-d44be1454e48\") " pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.698326 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:31 crc kubenswrapper[4916]: I1203 19:50:31.717691 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:31 crc kubenswrapper[4916]: W1203 19:50:31.933859 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7ee41e1_65cb_4642_ae18_5f0a926d8c1d.slice/crio-72509b5f26befe7fee916e789514b340c2cffa9f078384d2784db8ec8c4800f4 WatchSource:0}: Error finding container 72509b5f26befe7fee916e789514b340c2cffa9f078384d2784db8ec8c4800f4: Status 404 returned error can't find the container with id 72509b5f26befe7fee916e789514b340c2cffa9f078384d2784db8ec8c4800f4 Dec 03 19:50:31 crc kubenswrapper[4916]: W1203 19:50:31.961194 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a20edfd_dcdf_4b70_aa9a_c930b6210dcd.slice/crio-50ac72976dce9e8a1c34f2541096f8b24a3c7fdc5f8865fb9b088de544031524 WatchSource:0}: Error finding container 50ac72976dce9e8a1c34f2541096f8b24a3c7fdc5f8865fb9b088de544031524: Status 404 returned error can't find the container with id 50ac72976dce9e8a1c34f2541096f8b24a3c7fdc5f8865fb9b088de544031524 Dec 03 19:50:31 crc kubenswrapper[4916]: W1203 19:50:31.964032 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbfeee7e_2dd3_44b2_bd95_bcde8f377e1e.slice/crio-87727ab61e5170dd5f075d79e8483c505a87dc1f7c617d3d09a38658b77eec12 WatchSource:0}: Error finding container 87727ab61e5170dd5f075d79e8483c505a87dc1f7c617d3d09a38658b77eec12: Status 404 returned error can't find the container with id 87727ab61e5170dd5f075d79e8483c505a87dc1f7c617d3d09a38658b77eec12 Dec 03 19:50:31 crc kubenswrapper[4916]: W1203 19:50:31.964607 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5fb36950_9cbe_4a60_ac1b_5ce9d555b265.slice/crio-4e2802f80e30e6465abbf5c368d24b821d0b71323f19f3c598f43c177fe70c00 WatchSource:0}: Error finding container 4e2802f80e30e6465abbf5c368d24b821d0b71323f19f3c598f43c177fe70c00: Status 404 returned error can't find the container with id 4e2802f80e30e6465abbf5c368d24b821d0b71323f19f3c598f43c177fe70c00 Dec 03 19:50:32 crc kubenswrapper[4916]: I1203 19:50:32.040547 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" event={"ID":"b79a775e-2204-42bd-9679-e95e4843b91f","Type":"ContainerStarted","Data":"e003bfed3d6d50289e8ba310f0f5600391c1503db610beef01b4d82a449e839b"} Dec 03 19:50:32 crc kubenswrapper[4916]: I1203 19:50:32.042036 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7b767dc896-5v8nl" event={"ID":"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d","Type":"ContainerStarted","Data":"72509b5f26befe7fee916e789514b340c2cffa9f078384d2784db8ec8c4800f4"} Dec 03 19:50:32 crc kubenswrapper[4916]: I1203 19:50:32.043089 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5sdxp" event={"ID":"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e","Type":"ContainerStarted","Data":"87727ab61e5170dd5f075d79e8483c505a87dc1f7c617d3d09a38658b77eec12"} Dec 03 19:50:32 crc kubenswrapper[4916]: I1203 19:50:32.046810 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c989dd47c-7njt7" event={"ID":"5fb36950-9cbe-4a60-ac1b-5ce9d555b265","Type":"ContainerStarted","Data":"4e2802f80e30e6465abbf5c368d24b821d0b71323f19f3c598f43c177fe70c00"} Dec 03 19:50:32 crc kubenswrapper[4916]: I1203 19:50:32.048690 4916 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mjjt5" event={"ID":"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd","Type":"ContainerStarted","Data":"50ac72976dce9e8a1c34f2541096f8b24a3c7fdc5f8865fb9b088de544031524"} Dec 03 19:50:32 crc kubenswrapper[4916]: I1203 19:50:32.636930 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-8bs7m"] Dec 03 19:50:32 crc kubenswrapper[4916]: I1203 19:50:32.812437 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6491-account-create-update-rn5pw"] Dec 03 19:50:32 crc kubenswrapper[4916]: I1203 19:50:32.890173 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e24b-account-create-update-fcl4t"] Dec 03 19:50:32 crc kubenswrapper[4916]: I1203 19:50:32.938005 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-4786-account-create-update-fl47z"] Dec 03 19:50:32 crc kubenswrapper[4916]: W1203 19:50:32.955449 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5555e47_dacf_4ef9_80a4_a1bbd57dca1a.slice/crio-6b28128abc77cf4313aa987bd65d13e914ab922397e374eeb3a3b0efd15893c6 WatchSource:0}: Error finding container 6b28128abc77cf4313aa987bd65d13e914ab922397e374eeb3a3b0efd15893c6: Status 404 returned error can't find the container with id 6b28128abc77cf4313aa987bd65d13e914ab922397e374eeb3a3b0efd15893c6 Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.060085 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4786-account-create-update-fl47z" event={"ID":"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a","Type":"ContainerStarted","Data":"6b28128abc77cf4313aa987bd65d13e914ab922397e374eeb3a3b0efd15893c6"} Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.061231 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mjjt5" event={"ID":"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd","Type":"ContainerStarted","Data":"14acda667d359a2b662693c049ea2e397bccda516dfa3a5264a4348972804bd5"} Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.067772 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6491-account-create-update-rn5pw" event={"ID":"482fafd3-d1fd-4235-888d-aa645bdaa1e3","Type":"ContainerStarted","Data":"0ec17cee47bf5969d007e085785f5fd07eb7ae104786d0ccadf534afcdaf845a"} Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.071528 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" event={"ID":"97f0e085-66c0-48ab-b023-6a4d50e08683","Type":"ContainerStarted","Data":"98a36c675233af04d32dc580c094ebc6c40c1860e2db5eef1d9967edac7371e2"} Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.083163 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-mjjt5" podStartSLOduration=4.083145094 podStartE2EDuration="4.083145094s" podCreationTimestamp="2025-12-03 19:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:33.0755386 +0000 UTC m=+1249.038348866" watchObservedRunningTime="2025-12-03 19:50:33.083145094 +0000 UTC m=+1249.045955360" Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.088544 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-7b767dc896-5v8nl" 
event={"ID":"e7ee41e1-65cb-4642-ae18-5f0a926d8c1d","Type":"ContainerStarted","Data":"76b4637607bc7740a740a30a992b16c3007987dcc6d68718458268c21c65d41f"} Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.088799 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.096442 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8bs7m" event={"ID":"523ba5d4-842e-4726-8b66-813508d7a9d2","Type":"ContainerStarted","Data":"fdb671283360b149469ccefc74eb1608abe1a8e40faa4ee5194bcf7cbfbe2e8f"} Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.097603 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-84fcbd5864-k72dj"] Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.098831 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5sdxp" event={"ID":"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e","Type":"ContainerStarted","Data":"20a35aba4807ea23c2ccad2b4be130ab685a2f3d08a66d1e32e4dfb8fa992b8d"} Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.101416 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-86f5cb85df-p52sw" event={"ID":"38cae7c4-cc3b-41b0-9552-e85743db98ab","Type":"ContainerStarted","Data":"939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220"} Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.101518 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-cfnapi-86f5cb85df-p52sw" podUID="38cae7c4-cc3b-41b0-9552-e85743db98ab" containerName="heat-cfnapi" containerID="cri-o://939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220" gracePeriod=60 Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.101622 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.106965 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-56c49bcc9c-497gn"] Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.126833 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-7b767dc896-5v8nl" podStartSLOduration=5.126809371 podStartE2EDuration="5.126809371s" podCreationTimestamp="2025-12-03 19:50:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:33.105525322 +0000 UTC m=+1249.068335608" watchObservedRunningTime="2025-12-03 19:50:33.126809371 +0000 UTC m=+1249.089619637" Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.130834 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-86f5cb85df-p52sw" podStartSLOduration=6.759551222 podStartE2EDuration="11.130823979s" podCreationTimestamp="2025-12-03 19:50:22 +0000 UTC" firstStartedPulling="2025-12-03 19:50:27.75845441 +0000 UTC m=+1243.721264676" lastFinishedPulling="2025-12-03 19:50:32.129727167 +0000 UTC m=+1248.092537433" observedRunningTime="2025-12-03 19:50:33.121136049 +0000 UTC m=+1249.083946305" watchObservedRunningTime="2025-12-03 19:50:33.130823979 +0000 UTC m=+1249.093634245" Dec 03 19:50:33 crc kubenswrapper[4916]: I1203 19:50:33.145102 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-5sdxp" podStartSLOduration=4.1450844 
podStartE2EDuration="4.1450844s" podCreationTimestamp="2025-12-03 19:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:33.141258738 +0000 UTC m=+1249.104069014" watchObservedRunningTime="2025-12-03 19:50:33.1450844 +0000 UTC m=+1249.107894656" Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.206040 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-556b54b4c6-vrtbp" event={"ID":"5828ebe5-f058-464d-bb8a-1846217aa15d","Type":"ContainerStarted","Data":"f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887"} Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.209293 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-api-556b54b4c6-vrtbp" podUID="5828ebe5-f058-464d-bb8a-1846217aa15d" containerName="heat-api" containerID="cri-o://f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887" gracePeriod=60 Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.209448 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.239415 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ef40a4d-a930-428c-a816-ad1afa6d6c04","Type":"ContainerStarted","Data":"083e30ff46487b6262a4d0df4d8eacd55634dbc1c6efd709527b80a28d12cfd4"} Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.263730 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6491-account-create-update-rn5pw" event={"ID":"482fafd3-d1fd-4235-888d-aa645bdaa1e3","Type":"ContainerStarted","Data":"0b4e4dddabc7f02d77448e14bc4c95b11fc7fd86b91c67eca6925a68ef79d8d7"} Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.277498 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-556b54b4c6-vrtbp" podStartSLOduration=7.90887071 podStartE2EDuration="12.277480245s" podCreationTimestamp="2025-12-03 19:50:22 +0000 UTC" firstStartedPulling="2025-12-03 19:50:27.751302649 +0000 UTC m=+1243.714112915" lastFinishedPulling="2025-12-03 19:50:32.119912184 +0000 UTC m=+1248.082722450" observedRunningTime="2025-12-03 19:50:34.252549018 +0000 UTC m=+1250.215359284" watchObservedRunningTime="2025-12-03 19:50:34.277480245 +0000 UTC m=+1250.240290511" Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.292407 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-84fcbd5864-k72dj" event={"ID":"480f4ab0-3854-480f-9dd8-d44be1454e48","Type":"ContainerStarted","Data":"6fdf83f3dff4e09a9e2d8cc9beb402eb6852e04463b3690483487dc17b2df557"} Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.292722 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-84fcbd5864-k72dj" event={"ID":"480f4ab0-3854-480f-9dd8-d44be1454e48","Type":"ContainerStarted","Data":"b2017afb4994f91575952b26d0cbf7f3656146cfa634d4d61a453e5a24266252"} Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.294049 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.305181 4916 generic.go:334] "Generic (PLEG): container finished" podID="cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e" containerID="20a35aba4807ea23c2ccad2b4be130ab685a2f3d08a66d1e32e4dfb8fa992b8d" exitCode=0 Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 
19:50:34.305258 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5sdxp" event={"ID":"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e","Type":"ContainerDied","Data":"20a35aba4807ea23c2ccad2b4be130ab685a2f3d08a66d1e32e4dfb8fa992b8d"} Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.306828 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4786-account-create-update-fl47z" event={"ID":"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a","Type":"ContainerStarted","Data":"a08fbda909c28e16479d4ce436f4f27a71a4d4a5bca53129f2755744aca4295e"} Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.310909 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-56c49bcc9c-497gn" event={"ID":"af5156d3-f2f0-4963-8561-5eac0b719c9a","Type":"ContainerStarted","Data":"68063a202b8b991bd58245f65cfa240981c2540f259f4be753f7ec792b7ced86"} Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.311369 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.312830 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-6491-account-create-update-rn5pw" podStartSLOduration=5.31281789 podStartE2EDuration="5.31281789s" podCreationTimestamp="2025-12-03 19:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:34.288028127 +0000 UTC m=+1250.250838393" watchObservedRunningTime="2025-12-03 19:50:34.31281789 +0000 UTC m=+1250.275628176" Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.314952 4916 generic.go:334] "Generic (PLEG): container finished" podID="b79a775e-2204-42bd-9679-e95e4843b91f" containerID="146bb7223e772a676a1f226562187d02c458960d420b9996fb84ec74333797c1" exitCode=1 Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.315010 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" event={"ID":"b79a775e-2204-42bd-9679-e95e4843b91f","Type":"ContainerDied","Data":"146bb7223e772a676a1f226562187d02c458960d420b9996fb84ec74333797c1"} Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.315367 4916 scope.go:117] "RemoveContainer" containerID="146bb7223e772a676a1f226562187d02c458960d420b9996fb84ec74333797c1" Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.346679 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-84fcbd5864-k72dj" podStartSLOduration=3.346655845 podStartE2EDuration="3.346655845s" podCreationTimestamp="2025-12-03 19:50:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:34.316052487 +0000 UTC m=+1250.278862753" watchObservedRunningTime="2025-12-03 19:50:34.346655845 +0000 UTC m=+1250.309466111" Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.384275 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-56c49bcc9c-497gn" podStartSLOduration=3.384251141 podStartE2EDuration="3.384251141s" podCreationTimestamp="2025-12-03 19:50:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:34.333248577 +0000 UTC m=+1250.296058843" watchObservedRunningTime="2025-12-03 19:50:34.384251141 +0000 UTC m=+1250.347061397" Dec 03 
Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.393477 4916 generic.go:334] "Generic (PLEG): container finished" podID="523ba5d4-842e-4726-8b66-813508d7a9d2" containerID="b49a93ae7a4d362c6b300c8bdc9910e9377074a5101adbb6faaf57b411f316f6" exitCode=0
Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.393668 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8bs7m" event={"ID":"523ba5d4-842e-4726-8b66-813508d7a9d2","Type":"ContainerDied","Data":"b49a93ae7a4d362c6b300c8bdc9910e9377074a5101adbb6faaf57b411f316f6"}
Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.405603 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-4786-account-create-update-fl47z" podStartSLOduration=5.405577761 podStartE2EDuration="5.405577761s" podCreationTimestamp="2025-12-03 19:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:34.357130225 +0000 UTC m=+1250.319940491" watchObservedRunningTime="2025-12-03 19:50:34.405577761 +0000 UTC m=+1250.368388027"
Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.413864 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c989dd47c-7njt7" event={"ID":"5fb36950-9cbe-4a60-ac1b-5ce9d555b265","Type":"ContainerStarted","Data":"6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1"}
Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.414451 4916 scope.go:117] "RemoveContainer" containerID="6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1"
Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.420836 4916 generic.go:334] "Generic (PLEG): container finished" podID="7a20edfd-dcdf-4b70-aa9a-c930b6210dcd" containerID="14acda667d359a2b662693c049ea2e397bccda516dfa3a5264a4348972804bd5" exitCode=0
Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.421225 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mjjt5" event={"ID":"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd","Type":"ContainerDied","Data":"14acda667d359a2b662693c049ea2e397bccda516dfa3a5264a4348972804bd5"}
Dec 03 19:50:34 crc kubenswrapper[4916]: I1203 19:50:34.524699 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" podStartSLOduration=5.524669856 podStartE2EDuration="5.524669856s" podCreationTimestamp="2025-12-03 19:50:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:34.438098311 +0000 UTC m=+1250.400908577" watchObservedRunningTime="2025-12-03 19:50:34.524669856 +0000 UTC m=+1250.487480122"
Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.038631 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.039293 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4fe190f9-3a33-4b45-809e-1bbff64ab3fb" containerName="glance-log" containerID="cri-o://3e204a8da2a04a4e00c481245b1b2d3acbb6b5af234256168fe1f97d4922f9df" gracePeriod=30
Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.039708 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="4fe190f9-3a33-4b45-809e-1bbff64ab3fb" containerName="glance-httpd" containerID="cri-o://c78345133e3de8f4802024621da8b100d7e6373e8ee6ed90578fe4f5f58f9fbb" gracePeriod=30
Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.092283 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-556b54b4c6-vrtbp"
Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.168234 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-combined-ca-bundle\") pod \"5828ebe5-f058-464d-bb8a-1846217aa15d\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") "
Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.168299 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6wz4\" (UniqueName: \"kubernetes.io/projected/5828ebe5-f058-464d-bb8a-1846217aa15d-kube-api-access-j6wz4\") pod \"5828ebe5-f058-464d-bb8a-1846217aa15d\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") "
Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.168384 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-config-data-custom\") pod \"5828ebe5-f058-464d-bb8a-1846217aa15d\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") "
Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.168444 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-config-data\") pod \"5828ebe5-f058-464d-bb8a-1846217aa15d\" (UID: \"5828ebe5-f058-464d-bb8a-1846217aa15d\") "
Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.174324 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5828ebe5-f058-464d-bb8a-1846217aa15d-kube-api-access-j6wz4" (OuterVolumeSpecName: "kube-api-access-j6wz4") pod "5828ebe5-f058-464d-bb8a-1846217aa15d" (UID: "5828ebe5-f058-464d-bb8a-1846217aa15d"). InnerVolumeSpecName "kube-api-access-j6wz4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.177794 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5828ebe5-f058-464d-bb8a-1846217aa15d" (UID: "5828ebe5-f058-464d-bb8a-1846217aa15d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.204337 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5828ebe5-f058-464d-bb8a-1846217aa15d" (UID: "5828ebe5-f058-464d-bb8a-1846217aa15d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.269767 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.269940 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6wz4\" (UniqueName: \"kubernetes.io/projected/5828ebe5-f058-464d-bb8a-1846217aa15d-kube-api-access-j6wz4\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.269997 4916 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.270100 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5828ebe5-f058-464d-bb8a-1846217aa15d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.302100 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.431928 4916 generic.go:334] "Generic (PLEG): container finished" podID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" containerID="6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1" exitCode=1 Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.432183 4916 generic.go:334] "Generic (PLEG): container finished" podID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" containerID="262d036420dab0d05017f8112f29f52bddb9746ee2f9bf55e99cba23dab87434" exitCode=1 Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.432004 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c989dd47c-7njt7" event={"ID":"5fb36950-9cbe-4a60-ac1b-5ce9d555b265","Type":"ContainerDied","Data":"6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.432385 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c989dd47c-7njt7" event={"ID":"5fb36950-9cbe-4a60-ac1b-5ce9d555b265","Type":"ContainerDied","Data":"262d036420dab0d05017f8112f29f52bddb9746ee2f9bf55e99cba23dab87434"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.432411 4916 scope.go:117] "RemoveContainer" containerID="6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.432839 4916 scope.go:117] "RemoveContainer" containerID="262d036420dab0d05017f8112f29f52bddb9746ee2f9bf55e99cba23dab87434" Dec 03 19:50:35 crc kubenswrapper[4916]: E1203 19:50:35.433813 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-c989dd47c-7njt7_openstack(5fb36950-9cbe-4a60-ac1b-5ce9d555b265)\"" pod="openstack/heat-api-c989dd47c-7njt7" podUID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.434944 4916 generic.go:334] "Generic (PLEG): container finished" podID="4fe190f9-3a33-4b45-809e-1bbff64ab3fb" containerID="3e204a8da2a04a4e00c481245b1b2d3acbb6b5af234256168fe1f97d4922f9df" exitCode=143 Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.435018 4916 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4fe190f9-3a33-4b45-809e-1bbff64ab3fb","Type":"ContainerDied","Data":"3e204a8da2a04a4e00c481245b1b2d3acbb6b5af234256168fe1f97d4922f9df"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.436704 4916 generic.go:334] "Generic (PLEG): container finished" podID="f5555e47-dacf-4ef9-80a4-a1bbd57dca1a" containerID="a08fbda909c28e16479d4ce436f4f27a71a4d4a5bca53129f2755744aca4295e" exitCode=0 Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.436750 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4786-account-create-update-fl47z" event={"ID":"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a","Type":"ContainerDied","Data":"a08fbda909c28e16479d4ce436f4f27a71a4d4a5bca53129f2755744aca4295e"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.439929 4916 generic.go:334] "Generic (PLEG): container finished" podID="5828ebe5-f058-464d-bb8a-1846217aa15d" containerID="f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887" exitCode=0 Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.439982 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-556b54b4c6-vrtbp" event={"ID":"5828ebe5-f058-464d-bb8a-1846217aa15d","Type":"ContainerDied","Data":"f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.440006 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-556b54b4c6-vrtbp" event={"ID":"5828ebe5-f058-464d-bb8a-1846217aa15d","Type":"ContainerDied","Data":"74d85348961b4d026881125ef2f49896793a72e6ca7a41f66cb2b0b817eb7b3b"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.440044 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-api-556b54b4c6-vrtbp" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.466349 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-56c49bcc9c-497gn" event={"ID":"af5156d3-f2f0-4963-8561-5eac0b719c9a","Type":"ContainerStarted","Data":"88b54732cf0d219a387f9c8748e29a67bbfbf1edfe827b9246990fc8df79ea8d"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.473617 4916 generic.go:334] "Generic (PLEG): container finished" podID="482fafd3-d1fd-4235-888d-aa645bdaa1e3" containerID="0b4e4dddabc7f02d77448e14bc4c95b11fc7fd86b91c67eca6925a68ef79d8d7" exitCode=0 Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.473695 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6491-account-create-update-rn5pw" event={"ID":"482fafd3-d1fd-4235-888d-aa645bdaa1e3","Type":"ContainerDied","Data":"0b4e4dddabc7f02d77448e14bc4c95b11fc7fd86b91c67eca6925a68ef79d8d7"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.487394 4916 generic.go:334] "Generic (PLEG): container finished" podID="b79a775e-2204-42bd-9679-e95e4843b91f" containerID="337b1ff6a116f4bcd3f5c6b0dbb302c7ffed021d7e044b8f298e057fc4b93aab" exitCode=1 Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.487477 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" event={"ID":"b79a775e-2204-42bd-9679-e95e4843b91f","Type":"ContainerDied","Data":"337b1ff6a116f4bcd3f5c6b0dbb302c7ffed021d7e044b8f298e057fc4b93aab"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.487751 4916 scope.go:117] "RemoveContainer" containerID="337b1ff6a116f4bcd3f5c6b0dbb302c7ffed021d7e044b8f298e057fc4b93aab" Dec 03 19:50:35 crc kubenswrapper[4916]: E1203 19:50:35.487940 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-644d4d84cb-x7l99_openstack(b79a775e-2204-42bd-9679-e95e4843b91f)\"" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" podUID="b79a775e-2204-42bd-9679-e95e4843b91f" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.495267 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ef40a4d-a930-428c-a816-ad1afa6d6c04","Type":"ContainerStarted","Data":"6fa84222aaee90ea25b64f983d6c42f09e370c17498163441e22ab995d3f3d7a"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.495307 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ef40a4d-a930-428c-a816-ad1afa6d6c04","Type":"ContainerStarted","Data":"2d767894017ff04106242318b89c67b03fa0c0acbae08bda92f15c1f11d2b095"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.499202 4916 generic.go:334] "Generic (PLEG): container finished" podID="97f0e085-66c0-48ab-b023-6a4d50e08683" containerID="179b160895dee461f15502b215cfd6dfc04c096f3b758bab8073ab8293291d2c" exitCode=0 Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.499416 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" event={"ID":"97f0e085-66c0-48ab-b023-6a4d50e08683","Type":"ContainerDied","Data":"179b160895dee461f15502b215cfd6dfc04c096f3b758bab8073ab8293291d2c"} Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.548755 4916 scope.go:117] "RemoveContainer" containerID="6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1" Dec 03 19:50:35 crc kubenswrapper[4916]: E1203 
19:50:35.550238 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1\": container with ID starting with 6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1 not found: ID does not exist" containerID="6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.550267 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1"} err="failed to get container status \"6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1\": rpc error: code = NotFound desc = could not find container \"6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1\": container with ID starting with 6164f03703a9242122a8ea1ac2106f6de3ac516b96f4980d8875e7f202d7fcd1 not found: ID does not exist" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.550286 4916 scope.go:117] "RemoveContainer" containerID="f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.558413 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-556b54b4c6-vrtbp"] Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.567299 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-556b54b4c6-vrtbp"] Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.573964 4916 scope.go:117] "RemoveContainer" containerID="f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887" Dec 03 19:50:35 crc kubenswrapper[4916]: E1203 19:50:35.575987 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887\": container with ID starting with f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887 not found: ID does not exist" containerID="f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.576032 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887"} err="failed to get container status \"f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887\": rpc error: code = NotFound desc = could not find container \"f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887\": container with ID starting with f422c2f3a089236be90c1b96869ce95e2d4189b111117c7b0f7c63ece16ca887 not found: ID does not exist" Dec 03 19:50:35 crc kubenswrapper[4916]: I1203 19:50:35.576058 4916 scope.go:117] "RemoveContainer" containerID="146bb7223e772a676a1f226562187d02c458960d420b9996fb84ec74333797c1" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.004037 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-mjjt5" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.087854 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-5sdxp" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.097998 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klwfm\" (UniqueName: \"kubernetes.io/projected/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-kube-api-access-klwfm\") pod \"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd\" (UID: \"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd\") " Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.098069 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-operator-scripts\") pod \"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd\" (UID: \"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd\") " Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.099547 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8bs7m" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.103994 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7a20edfd-dcdf-4b70-aa9a-c930b6210dcd" (UID: "7a20edfd-dcdf-4b70-aa9a-c930b6210dcd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.109772 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-kube-api-access-klwfm" (OuterVolumeSpecName: "kube-api-access-klwfm") pod "7a20edfd-dcdf-4b70-aa9a-c930b6210dcd" (UID: "7a20edfd-dcdf-4b70-aa9a-c930b6210dcd"). InnerVolumeSpecName "kube-api-access-klwfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.200145 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/523ba5d4-842e-4726-8b66-813508d7a9d2-operator-scripts\") pod \"523ba5d4-842e-4726-8b66-813508d7a9d2\" (UID: \"523ba5d4-842e-4726-8b66-813508d7a9d2\") " Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.200223 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82lrn\" (UniqueName: \"kubernetes.io/projected/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-kube-api-access-82lrn\") pod \"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e\" (UID: \"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e\") " Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.200807 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-operator-scripts\") pod \"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e\" (UID: \"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e\") " Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.200841 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/523ba5d4-842e-4726-8b66-813508d7a9d2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "523ba5d4-842e-4726-8b66-813508d7a9d2" (UID: "523ba5d4-842e-4726-8b66-813508d7a9d2"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.200893 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bthlm\" (UniqueName: \"kubernetes.io/projected/523ba5d4-842e-4726-8b66-813508d7a9d2-kube-api-access-bthlm\") pod \"523ba5d4-842e-4726-8b66-813508d7a9d2\" (UID: \"523ba5d4-842e-4726-8b66-813508d7a9d2\") " Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.201213 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e" (UID: "cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.201724 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.201750 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/523ba5d4-842e-4726-8b66-813508d7a9d2-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.201760 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klwfm\" (UniqueName: \"kubernetes.io/projected/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-kube-api-access-klwfm\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.201771 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.204341 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-kube-api-access-82lrn" (OuterVolumeSpecName: "kube-api-access-82lrn") pod "cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e" (UID: "cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e"). InnerVolumeSpecName "kube-api-access-82lrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.204933 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/523ba5d4-842e-4726-8b66-813508d7a9d2-kube-api-access-bthlm" (OuterVolumeSpecName: "kube-api-access-bthlm") pod "523ba5d4-842e-4726-8b66-813508d7a9d2" (UID: "523ba5d4-842e-4726-8b66-813508d7a9d2"). InnerVolumeSpecName "kube-api-access-bthlm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.303140 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bthlm\" (UniqueName: \"kubernetes.io/projected/523ba5d4-842e-4726-8b66-813508d7a9d2-kube-api-access-bthlm\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.303171 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82lrn\" (UniqueName: \"kubernetes.io/projected/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e-kube-api-access-82lrn\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.489151 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5828ebe5-f058-464d-bb8a-1846217aa15d" path="/var/lib/kubelet/pods/5828ebe5-f058-464d-bb8a-1846217aa15d/volumes" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.522386 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-8bs7m" event={"ID":"523ba5d4-842e-4726-8b66-813508d7a9d2","Type":"ContainerDied","Data":"fdb671283360b149469ccefc74eb1608abe1a8e40faa4ee5194bcf7cbfbe2e8f"} Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.522445 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdb671283360b149469ccefc74eb1608abe1a8e40faa4ee5194bcf7cbfbe2e8f" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.522532 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-8bs7m" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.523832 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5sdxp" event={"ID":"cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e","Type":"ContainerDied","Data":"87727ab61e5170dd5f075d79e8483c505a87dc1f7c617d3d09a38658b77eec12"} Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.523879 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87727ab61e5170dd5f075d79e8483c505a87dc1f7c617d3d09a38658b77eec12" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.523987 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5sdxp" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.529620 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-mjjt5" event={"ID":"7a20edfd-dcdf-4b70-aa9a-c930b6210dcd","Type":"ContainerDied","Data":"50ac72976dce9e8a1c34f2541096f8b24a3c7fdc5f8865fb9b088de544031524"} Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.532602 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50ac72976dce9e8a1c34f2541096f8b24a3c7fdc5f8865fb9b088de544031524" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.531032 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-mjjt5" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.532917 4916 scope.go:117] "RemoveContainer" containerID="262d036420dab0d05017f8112f29f52bddb9746ee2f9bf55e99cba23dab87434" Dec 03 19:50:36 crc kubenswrapper[4916]: E1203 19:50:36.533302 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-c989dd47c-7njt7_openstack(5fb36950-9cbe-4a60-ac1b-5ce9d555b265)\"" pod="openstack/heat-api-c989dd47c-7njt7" podUID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" Dec 03 19:50:36 crc kubenswrapper[4916]: I1203 19:50:36.541489 4916 scope.go:117] "RemoveContainer" containerID="337b1ff6a116f4bcd3f5c6b0dbb302c7ffed021d7e044b8f298e057fc4b93aab" Dec 03 19:50:36 crc kubenswrapper[4916]: E1203 19:50:36.541827 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-644d4d84cb-x7l99_openstack(b79a775e-2204-42bd-9679-e95e4843b91f)\"" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" podUID="b79a775e-2204-42bd-9679-e95e4843b91f" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.131405 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4786-account-create-update-fl47z" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.136387 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6491-account-create-update-rn5pw" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.149584 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.230251 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tlgl\" (UniqueName: \"kubernetes.io/projected/97f0e085-66c0-48ab-b023-6a4d50e08683-kube-api-access-6tlgl\") pod \"97f0e085-66c0-48ab-b023-6a4d50e08683\" (UID: \"97f0e085-66c0-48ab-b023-6a4d50e08683\") " Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.230367 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/482fafd3-d1fd-4235-888d-aa645bdaa1e3-operator-scripts\") pod \"482fafd3-d1fd-4235-888d-aa645bdaa1e3\" (UID: \"482fafd3-d1fd-4235-888d-aa645bdaa1e3\") " Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.230423 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dv57k\" (UniqueName: \"kubernetes.io/projected/482fafd3-d1fd-4235-888d-aa645bdaa1e3-kube-api-access-dv57k\") pod \"482fafd3-d1fd-4235-888d-aa645bdaa1e3\" (UID: \"482fafd3-d1fd-4235-888d-aa645bdaa1e3\") " Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.230459 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97f0e085-66c0-48ab-b023-6a4d50e08683-operator-scripts\") pod \"97f0e085-66c0-48ab-b023-6a4d50e08683\" (UID: \"97f0e085-66c0-48ab-b023-6a4d50e08683\") " Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.230498 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-operator-scripts\") pod \"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a\" (UID: \"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a\") " Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.230537 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvvq8\" (UniqueName: \"kubernetes.io/projected/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-kube-api-access-dvvq8\") pod \"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a\" (UID: \"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a\") " Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.236095 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97f0e085-66c0-48ab-b023-6a4d50e08683-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "97f0e085-66c0-48ab-b023-6a4d50e08683" (UID: "97f0e085-66c0-48ab-b023-6a4d50e08683"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.236101 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/482fafd3-d1fd-4235-888d-aa645bdaa1e3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "482fafd3-d1fd-4235-888d-aa645bdaa1e3" (UID: "482fafd3-d1fd-4235-888d-aa645bdaa1e3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.236424 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f5555e47-dacf-4ef9-80a4-a1bbd57dca1a" (UID: "f5555e47-dacf-4ef9-80a4-a1bbd57dca1a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.239163 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/482fafd3-d1fd-4235-888d-aa645bdaa1e3-kube-api-access-dv57k" (OuterVolumeSpecName: "kube-api-access-dv57k") pod "482fafd3-d1fd-4235-888d-aa645bdaa1e3" (UID: "482fafd3-d1fd-4235-888d-aa645bdaa1e3"). InnerVolumeSpecName "kube-api-access-dv57k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.243167 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97f0e085-66c0-48ab-b023-6a4d50e08683-kube-api-access-6tlgl" (OuterVolumeSpecName: "kube-api-access-6tlgl") pod "97f0e085-66c0-48ab-b023-6a4d50e08683" (UID: "97f0e085-66c0-48ab-b023-6a4d50e08683"). InnerVolumeSpecName "kube-api-access-6tlgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.247820 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-kube-api-access-dvvq8" (OuterVolumeSpecName: "kube-api-access-dvvq8") pod "f5555e47-dacf-4ef9-80a4-a1bbd57dca1a" (UID: "f5555e47-dacf-4ef9-80a4-a1bbd57dca1a"). InnerVolumeSpecName "kube-api-access-dvvq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.333280 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tlgl\" (UniqueName: \"kubernetes.io/projected/97f0e085-66c0-48ab-b023-6a4d50e08683-kube-api-access-6tlgl\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.333611 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/482fafd3-d1fd-4235-888d-aa645bdaa1e3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.333620 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dv57k\" (UniqueName: \"kubernetes.io/projected/482fafd3-d1fd-4235-888d-aa645bdaa1e3-kube-api-access-dv57k\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.333629 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97f0e085-66c0-48ab-b023-6a4d50e08683-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.333638 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.333647 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvvq8\" (UniqueName: \"kubernetes.io/projected/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a-kube-api-access-dvvq8\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.548647 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-4786-account-create-update-fl47z" event={"ID":"f5555e47-dacf-4ef9-80a4-a1bbd57dca1a","Type":"ContainerDied","Data":"6b28128abc77cf4313aa987bd65d13e914ab922397e374eeb3a3b0efd15893c6"} Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.548700 4916 pod_container_deletor.go:80] "Container not found in 
pod's containers" containerID="6b28128abc77cf4313aa987bd65d13e914ab922397e374eeb3a3b0efd15893c6" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.548667 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-4786-account-create-update-fl47z" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.552046 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ef40a4d-a930-428c-a816-ad1afa6d6c04","Type":"ContainerStarted","Data":"fe07e7f9b2713593652e751d1206e7db8b7d636fe7f6965bd9c7283f887dfe60"} Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.552140 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="ceilometer-central-agent" containerID="cri-o://083e30ff46487b6262a4d0df4d8eacd55634dbc1c6efd709527b80a28d12cfd4" gracePeriod=30 Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.552186 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.552219 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="proxy-httpd" containerID="cri-o://fe07e7f9b2713593652e751d1206e7db8b7d636fe7f6965bd9c7283f887dfe60" gracePeriod=30 Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.552258 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="sg-core" containerID="cri-o://6fa84222aaee90ea25b64f983d6c42f09e370c17498163441e22ab995d3f3d7a" gracePeriod=30 Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.552294 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="ceilometer-notification-agent" containerID="cri-o://2d767894017ff04106242318b89c67b03fa0c0acbae08bda92f15c1f11d2b095" gracePeriod=30 Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.560866 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-6491-account-create-update-rn5pw" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.560847 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6491-account-create-update-rn5pw" event={"ID":"482fafd3-d1fd-4235-888d-aa645bdaa1e3","Type":"ContainerDied","Data":"0ec17cee47bf5969d007e085785f5fd07eb7ae104786d0ccadf534afcdaf845a"} Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.562196 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ec17cee47bf5969d007e085785f5fd07eb7ae104786d0ccadf534afcdaf845a" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.564949 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" event={"ID":"97f0e085-66c0-48ab-b023-6a4d50e08683","Type":"ContainerDied","Data":"98a36c675233af04d32dc580c094ebc6c40c1860e2db5eef1d9967edac7371e2"} Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.564970 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98a36c675233af04d32dc580c094ebc6c40c1860e2db5eef1d9967edac7371e2" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.565017 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e24b-account-create-update-fcl4t" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.565789 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.590094 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.6014842489999999 podStartE2EDuration="9.590080557s" podCreationTimestamp="2025-12-03 19:50:28 +0000 UTC" firstStartedPulling="2025-12-03 19:50:29.211855579 +0000 UTC m=+1245.174665855" lastFinishedPulling="2025-12-03 19:50:37.200451897 +0000 UTC m=+1253.163262163" observedRunningTime="2025-12-03 19:50:37.587023595 +0000 UTC m=+1253.549833861" watchObservedRunningTime="2025-12-03 19:50:37.590080557 +0000 UTC m=+1253.552890823" Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.655370 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-hzm9s"] Dec 03 19:50:37 crc kubenswrapper[4916]: I1203 19:50:37.655619 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" podUID="87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" containerName="dnsmasq-dns" containerID="cri-o://4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8" gracePeriod=10 Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.180134 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.250325 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqz42\" (UniqueName: \"kubernetes.io/projected/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-kube-api-access-vqz42\") pod \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.250392 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-config\") pod \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.250542 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-sb\") pod \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.250647 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-nb\") pod \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.250676 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-swift-storage-0\") pod \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.250799 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-svc\") pod \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\" (UID: \"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.264889 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-kube-api-access-vqz42" (OuterVolumeSpecName: "kube-api-access-vqz42") pod "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" (UID: "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d"). InnerVolumeSpecName "kube-api-access-vqz42". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.321451 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" (UID: "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.327602 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" (UID: "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.328909 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" (UID: "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.334025 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-config" (OuterVolumeSpecName: "config") pod "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" (UID: "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.346395 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" (UID: "87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.352535 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.352581 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.352591 4916 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.352603 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.352612 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqz42\" (UniqueName: \"kubernetes.io/projected/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-kube-api-access-vqz42\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.352620 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.578419 4916 generic.go:334] "Generic (PLEG): container finished" podID="87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" containerID="4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8" exitCode=0 Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.578489 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" event={"ID":"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d","Type":"ContainerDied","Data":"4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8"} Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.578520 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s" event={"ID":"87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d","Type":"ContainerDied","Data":"15080ca14742f37b1f2568f5c62d7ae5415500d6e349ef9ad4936b4587a6addd"}
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.578538 4916 scope.go:117] "RemoveContainer" containerID="4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8"
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.578703 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-hzm9s"
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.583660 4916 generic.go:334] "Generic (PLEG): container finished" podID="4fe190f9-3a33-4b45-809e-1bbff64ab3fb" containerID="c78345133e3de8f4802024621da8b100d7e6373e8ee6ed90578fe4f5f58f9fbb" exitCode=0
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.583718 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4fe190f9-3a33-4b45-809e-1bbff64ab3fb","Type":"ContainerDied","Data":"c78345133e3de8f4802024621da8b100d7e6373e8ee6ed90578fe4f5f58f9fbb"}
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.586994 4916 generic.go:334] "Generic (PLEG): container finished" podID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerID="6fa84222aaee90ea25b64f983d6c42f09e370c17498163441e22ab995d3f3d7a" exitCode=2
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.587026 4916 generic.go:334] "Generic (PLEG): container finished" podID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerID="2d767894017ff04106242318b89c67b03fa0c0acbae08bda92f15c1f11d2b095" exitCode=0
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.587049 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ef40a4d-a930-428c-a816-ad1afa6d6c04","Type":"ContainerDied","Data":"6fa84222aaee90ea25b64f983d6c42f09e370c17498163441e22ab995d3f3d7a"}
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.587077 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ef40a4d-a930-428c-a816-ad1afa6d6c04","Type":"ContainerDied","Data":"2d767894017ff04106242318b89c67b03fa0c0acbae08bda92f15c1f11d2b095"}
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.601142 4916 scope.go:117] "RemoveContainer" containerID="0a9df916a95089eeae26a6141af41114e2cab992b21bf3db343a0eea435ee1ea"
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.603723 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-hzm9s"]
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.620515 4916 scope.go:117] "RemoveContainer" containerID="4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8"
Dec 03 19:50:38 crc kubenswrapper[4916]: E1203 19:50:38.621086 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8\": container with ID starting with 4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8 not found: ID does not exist" containerID="4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8"
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.621142 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8"} err="failed to get container status \"4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8\": rpc error: code = NotFound desc = could not find container \"4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8\": container with ID starting with 4be06e7538d32466ae791b07efc5072d1c5b65c403866cb1c8bf2b2c77699df8 not found: ID does not exist"
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.621186 4916 scope.go:117] "RemoveContainer" containerID="0a9df916a95089eeae26a6141af41114e2cab992b21bf3db343a0eea435ee1ea"
Dec 03 19:50:38 crc kubenswrapper[4916]: E1203 19:50:38.621408 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a9df916a95089eeae26a6141af41114e2cab992b21bf3db343a0eea435ee1ea\": container with ID starting with 0a9df916a95089eeae26a6141af41114e2cab992b21bf3db343a0eea435ee1ea not found: ID does not exist" containerID="0a9df916a95089eeae26a6141af41114e2cab992b21bf3db343a0eea435ee1ea"
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.621428 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a9df916a95089eeae26a6141af41114e2cab992b21bf3db343a0eea435ee1ea"} err="failed to get container status \"0a9df916a95089eeae26a6141af41114e2cab992b21bf3db343a0eea435ee1ea\": rpc error: code = NotFound desc = could not find container \"0a9df916a95089eeae26a6141af41114e2cab992b21bf3db343a0eea435ee1ea\": container with ID starting with 0a9df916a95089eeae26a6141af41114e2cab992b21bf3db343a0eea435ee1ea not found: ID does not exist"
Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.639928 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-hzm9s"]
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.980203 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-scripts\") pod \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.980245 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldnc5\" (UniqueName: \"kubernetes.io/projected/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-kube-api-access-ldnc5\") pod \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.980288 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-logs\") pod \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.980326 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-combined-ca-bundle\") pod \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.980378 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-public-tls-certs\") pod \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.980433 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-httpd-run\") pod \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.980451 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-config-data\") pod \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.980499 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\" (UID: \"4fe190f9-3a33-4b45-809e-1bbff64ab3fb\") " Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.980826 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4fe190f9-3a33-4b45-809e-1bbff64ab3fb" (UID: "4fe190f9-3a33-4b45-809e-1bbff64ab3fb"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.981141 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-logs" (OuterVolumeSpecName: "logs") pod "4fe190f9-3a33-4b45-809e-1bbff64ab3fb" (UID: "4fe190f9-3a33-4b45-809e-1bbff64ab3fb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.983442 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-scripts" (OuterVolumeSpecName: "scripts") pod "4fe190f9-3a33-4b45-809e-1bbff64ab3fb" (UID: "4fe190f9-3a33-4b45-809e-1bbff64ab3fb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.984970 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "4fe190f9-3a33-4b45-809e-1bbff64ab3fb" (UID: "4fe190f9-3a33-4b45-809e-1bbff64ab3fb"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 19:50:38 crc kubenswrapper[4916]: I1203 19:50:38.985133 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-kube-api-access-ldnc5" (OuterVolumeSpecName: "kube-api-access-ldnc5") pod "4fe190f9-3a33-4b45-809e-1bbff64ab3fb" (UID: "4fe190f9-3a33-4b45-809e-1bbff64ab3fb"). InnerVolumeSpecName "kube-api-access-ldnc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.021259 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fe190f9-3a33-4b45-809e-1bbff64ab3fb" (UID: "4fe190f9-3a33-4b45-809e-1bbff64ab3fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.049302 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-config-data" (OuterVolumeSpecName: "config-data") pod "4fe190f9-3a33-4b45-809e-1bbff64ab3fb" (UID: "4fe190f9-3a33-4b45-809e-1bbff64ab3fb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.073089 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4fe190f9-3a33-4b45-809e-1bbff64ab3fb" (UID: "4fe190f9-3a33-4b45-809e-1bbff64ab3fb"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.082142 4916 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.082176 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.082185 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldnc5\" (UniqueName: \"kubernetes.io/projected/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-kube-api-access-ldnc5\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.082196 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-logs\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.082206 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.082216 4916 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.082225 4916 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.082233 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fe190f9-3a33-4b45-809e-1bbff64ab3fb-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.107589 4916 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.130580 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.131335 4916 scope.go:117] "RemoveContainer" containerID="262d036420dab0d05017f8112f29f52bddb9746ee2f9bf55e99cba23dab87434" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.131595 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-c989dd47c-7njt7_openstack(5fb36950-9cbe-4a60-ac1b-5ce9d555b265)\"" pod="openstack/heat-api-c989dd47c-7njt7" podUID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.131975 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.142962 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 
19:50:39.143477 4916 scope.go:117] "RemoveContainer" containerID="337b1ff6a116f4bcd3f5c6b0dbb302c7ffed021d7e044b8f298e057fc4b93aab" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.143680 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-644d4d84cb-x7l99_openstack(b79a775e-2204-42bd-9679-e95e4843b91f)\"" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" podUID="b79a775e-2204-42bd-9679-e95e4843b91f" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.143926 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.183670 4916 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.598165 4916 generic.go:334] "Generic (PLEG): container finished" podID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerID="083e30ff46487b6262a4d0df4d8eacd55634dbc1c6efd709527b80a28d12cfd4" exitCode=0 Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.598265 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ef40a4d-a930-428c-a816-ad1afa6d6c04","Type":"ContainerDied","Data":"083e30ff46487b6262a4d0df4d8eacd55634dbc1c6efd709527b80a28d12cfd4"} Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.602311 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4fe190f9-3a33-4b45-809e-1bbff64ab3fb","Type":"ContainerDied","Data":"683deb5e7c7d7e5ed32104cd334949b1fec52decc86df1ba819baf18a9fb524b"} Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.602347 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.602384 4916 scope.go:117] "RemoveContainer" containerID="c78345133e3de8f4802024621da8b100d7e6373e8ee6ed90578fe4f5f58f9fbb" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.602912 4916 scope.go:117] "RemoveContainer" containerID="262d036420dab0d05017f8112f29f52bddb9746ee2f9bf55e99cba23dab87434" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.603122 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-api pod=heat-api-c989dd47c-7njt7_openstack(5fb36950-9cbe-4a60-ac1b-5ce9d555b265)\"" pod="openstack/heat-api-c989dd47c-7njt7" podUID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.603164 4916 scope.go:117] "RemoveContainer" containerID="337b1ff6a116f4bcd3f5c6b0dbb302c7ffed021d7e044b8f298e057fc4b93aab" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.603559 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"heat-cfnapi\" with CrashLoopBackOff: \"back-off 10s restarting failed container=heat-cfnapi pod=heat-cfnapi-644d4d84cb-x7l99_openstack(b79a775e-2204-42bd-9679-e95e4843b91f)\"" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" podUID="b79a775e-2204-42bd-9679-e95e4843b91f" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.636040 4916 scope.go:117] "RemoveContainer" containerID="3e204a8da2a04a4e00c481245b1b2d3acbb6b5af234256168fe1f97d4922f9df" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.652725 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.665575 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697283 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.697649 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5828ebe5-f058-464d-bb8a-1846217aa15d" containerName="heat-api" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697665 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5828ebe5-f058-464d-bb8a-1846217aa15d" containerName="heat-api" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.697676 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e" containerName="mariadb-database-create" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697683 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e" containerName="mariadb-database-create" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.697693 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97f0e085-66c0-48ab-b023-6a4d50e08683" containerName="mariadb-account-create-update" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697699 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="97f0e085-66c0-48ab-b023-6a4d50e08683" containerName="mariadb-account-create-update" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.697711 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a20edfd-dcdf-4b70-aa9a-c930b6210dcd" containerName="mariadb-database-create" Dec 
03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697717 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a20edfd-dcdf-4b70-aa9a-c930b6210dcd" containerName="mariadb-database-create" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.697730 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fe190f9-3a33-4b45-809e-1bbff64ab3fb" containerName="glance-httpd" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697735 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fe190f9-3a33-4b45-809e-1bbff64ab3fb" containerName="glance-httpd" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.697747 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" containerName="init" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697752 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" containerName="init" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.697761 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="482fafd3-d1fd-4235-888d-aa645bdaa1e3" containerName="mariadb-account-create-update" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697766 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="482fafd3-d1fd-4235-888d-aa645bdaa1e3" containerName="mariadb-account-create-update" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.697781 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fe190f9-3a33-4b45-809e-1bbff64ab3fb" containerName="glance-log" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697788 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fe190f9-3a33-4b45-809e-1bbff64ab3fb" containerName="glance-log" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.697801 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" containerName="dnsmasq-dns" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697807 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" containerName="dnsmasq-dns" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.697820 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5555e47-dacf-4ef9-80a4-a1bbd57dca1a" containerName="mariadb-account-create-update" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697827 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5555e47-dacf-4ef9-80a4-a1bbd57dca1a" containerName="mariadb-account-create-update" Dec 03 19:50:39 crc kubenswrapper[4916]: E1203 19:50:39.697837 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="523ba5d4-842e-4726-8b66-813508d7a9d2" containerName="mariadb-database-create" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697842 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="523ba5d4-842e-4726-8b66-813508d7a9d2" containerName="mariadb-database-create" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.697995 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" containerName="dnsmasq-dns" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.698007 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="523ba5d4-842e-4726-8b66-813508d7a9d2" containerName="mariadb-database-create" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.698014 4916 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="482fafd3-d1fd-4235-888d-aa645bdaa1e3" containerName="mariadb-account-create-update" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.698029 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5555e47-dacf-4ef9-80a4-a1bbd57dca1a" containerName="mariadb-account-create-update" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.698038 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="97f0e085-66c0-48ab-b023-6a4d50e08683" containerName="mariadb-account-create-update" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.698049 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fe190f9-3a33-4b45-809e-1bbff64ab3fb" containerName="glance-httpd" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.698062 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e" containerName="mariadb-database-create" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.698071 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a20edfd-dcdf-4b70-aa9a-c930b6210dcd" containerName="mariadb-database-create" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.698084 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="5828ebe5-f058-464d-bb8a-1846217aa15d" containerName="heat-api" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.698093 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fe190f9-3a33-4b45-809e-1bbff64ab3fb" containerName="glance-log" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.698968 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.702360 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.706402 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.716973 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5vk6b"] Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.718178 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.722885 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.722979 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-r5cfq" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.723111 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.739704 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.779642 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5vk6b"] Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.794855 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwqzt\" (UniqueName: \"kubernetes.io/projected/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-kube-api-access-mwqzt\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.794913 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmpnp\" (UniqueName: \"kubernetes.io/projected/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-kube-api-access-wmpnp\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.794938 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.794964 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-logs\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.794984 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.795009 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-config-data\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.795035 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-config-data\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.795067 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.795083 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.795100 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-scripts\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.795118 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.795147 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-scripts\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.896726 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.896770 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.896790 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-scripts\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.896810 4916 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.896843 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-scripts\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.896904 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwqzt\" (UniqueName: \"kubernetes.io/projected/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-kube-api-access-mwqzt\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.896934 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmpnp\" (UniqueName: \"kubernetes.io/projected/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-kube-api-access-wmpnp\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.896957 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.896980 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-logs\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.896999 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.897034 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-config-data\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.897062 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-config-data\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.897113 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for 
volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.897244 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.897624 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-logs\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.904213 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-config-data\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.904239 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.906177 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.908922 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.909127 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-config-data\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.914547 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-scripts\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.917626 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.917951 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwqzt\" (UniqueName: \"kubernetes.io/projected/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-kube-api-access-mwqzt\") pod \"nova-cell0-conductor-db-sync-5vk6b\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.926648 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmpnp\" (UniqueName: \"kubernetes.io/projected/09b23ba0-7111-4c00-9ecc-a4ea541b3ca4-kube-api-access-wmpnp\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:39 crc kubenswrapper[4916]: I1203 19:50:39.943481 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4\") " pod="openstack/glance-default-external-api-0" Dec 03 19:50:40 crc kubenswrapper[4916]: I1203 19:50:40.020371 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 19:50:40 crc kubenswrapper[4916]: I1203 19:50:40.039301 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:50:40 crc kubenswrapper[4916]: I1203 19:50:40.486602 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fe190f9-3a33-4b45-809e-1bbff64ab3fb" path="/var/lib/kubelet/pods/4fe190f9-3a33-4b45-809e-1bbff64ab3fb/volumes" Dec 03 19:50:40 crc kubenswrapper[4916]: I1203 19:50:40.487899 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d" path="/var/lib/kubelet/pods/87ff7ec0-4c1d-46c2-a7aa-ed52b6e5482d/volumes" Dec 03 19:50:40 crc kubenswrapper[4916]: I1203 19:50:40.530545 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5vk6b"] Dec 03 19:50:40 crc kubenswrapper[4916]: I1203 19:50:40.611434 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5vk6b" event={"ID":"05ab20e9-de0e-4f40-aa4b-a3b685fe9712","Type":"ContainerStarted","Data":"e76725df7cf54bea7dc0a3eb9b7b159667f504b30386455dc53af766cb9e1a8c"} Dec 03 19:50:40 crc kubenswrapper[4916]: I1203 19:50:40.708139 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 19:50:40 crc kubenswrapper[4916]: W1203 19:50:40.718730 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09b23ba0_7111_4c00_9ecc_a4ea541b3ca4.slice/crio-a0baef4f3c060e19e01fb0538e56bdc3105e3c291c3849d30c22efd5d60f2a26 WatchSource:0}: Error finding container a0baef4f3c060e19e01fb0538e56bdc3105e3c291c3849d30c22efd5d60f2a26: Status 404 returned error can't find the container with id a0baef4f3c060e19e01fb0538e56bdc3105e3c291c3849d30c22efd5d60f2a26 Dec 03 19:50:41 crc kubenswrapper[4916]: I1203 19:50:41.506008 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:50:41 crc 
kubenswrapper[4916]: I1203 19:50:41.506534 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8d15f4cc-451e-4898-9125-a2ad4f229e3d" containerName="glance-log" containerID="cri-o://2c5d60658a726176a4518d7247e35b98c1e3353c2719e48484db0fe06770760d" gracePeriod=30 Dec 03 19:50:41 crc kubenswrapper[4916]: I1203 19:50:41.507009 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8d15f4cc-451e-4898-9125-a2ad4f229e3d" containerName="glance-httpd" containerID="cri-o://52ac206e19a957b4b35869ab6f3dd919d94e86246741828c8bb387e27574fa3a" gracePeriod=30 Dec 03 19:50:41 crc kubenswrapper[4916]: I1203 19:50:41.632579 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4","Type":"ContainerStarted","Data":"1a4dbf8da1bb414a991ee22958bada71a1247571a3af3cfeabf89b2dc67c8d11"} Dec 03 19:50:41 crc kubenswrapper[4916]: I1203 19:50:41.632622 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4","Type":"ContainerStarted","Data":"a0baef4f3c060e19e01fb0538e56bdc3105e3c291c3849d30c22efd5d60f2a26"} Dec 03 19:50:42 crc kubenswrapper[4916]: I1203 19:50:42.488087 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-6dd8857784-86hhw" Dec 03 19:50:42 crc kubenswrapper[4916]: I1203 19:50:42.644210 4916 generic.go:334] "Generic (PLEG): container finished" podID="8d15f4cc-451e-4898-9125-a2ad4f229e3d" containerID="2c5d60658a726176a4518d7247e35b98c1e3353c2719e48484db0fe06770760d" exitCode=143 Dec 03 19:50:42 crc kubenswrapper[4916]: I1203 19:50:42.644293 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d15f4cc-451e-4898-9125-a2ad4f229e3d","Type":"ContainerDied","Data":"2c5d60658a726176a4518d7247e35b98c1e3353c2719e48484db0fe06770760d"} Dec 03 19:50:42 crc kubenswrapper[4916]: I1203 19:50:42.647978 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"09b23ba0-7111-4c00-9ecc-a4ea541b3ca4","Type":"ContainerStarted","Data":"ea741ef0a1d16d65b5fae72b1752e4dfaca966980bbfe2b3d00ac4d3d18a6e23"} Dec 03 19:50:42 crc kubenswrapper[4916]: I1203 19:50:42.679977 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.67995836 podStartE2EDuration="3.67995836s" podCreationTimestamp="2025-12-03 19:50:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:42.674467474 +0000 UTC m=+1258.637277740" watchObservedRunningTime="2025-12-03 19:50:42.67995836 +0000 UTC m=+1258.642768646" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.166539 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-56c49bcc9c-497gn" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.216532 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-c989dd47c-7njt7"] Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.496320 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-84fcbd5864-k72dj" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.541127 
4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-644d4d84cb-x7l99"] Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.621612 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.664577 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-c989dd47c-7njt7" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.665112 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-c989dd47c-7njt7" event={"ID":"5fb36950-9cbe-4a60-ac1b-5ce9d555b265","Type":"ContainerDied","Data":"4e2802f80e30e6465abbf5c368d24b821d0b71323f19f3c598f43c177fe70c00"} Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.665147 4916 scope.go:117] "RemoveContainer" containerID="262d036420dab0d05017f8112f29f52bddb9746ee2f9bf55e99cba23dab87434" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.797712 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-combined-ca-bundle\") pod \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.797767 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrt8m\" (UniqueName: \"kubernetes.io/projected/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-kube-api-access-wrt8m\") pod \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.797805 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data\") pod \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.797921 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data-custom\") pod \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\" (UID: \"5fb36950-9cbe-4a60-ac1b-5ce9d555b265\") " Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.809285 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-kube-api-access-wrt8m" (OuterVolumeSpecName: "kube-api-access-wrt8m") pod "5fb36950-9cbe-4a60-ac1b-5ce9d555b265" (UID: "5fb36950-9cbe-4a60-ac1b-5ce9d555b265"). InnerVolumeSpecName "kube-api-access-wrt8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.809944 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5fb36950-9cbe-4a60-ac1b-5ce9d555b265" (UID: "5fb36950-9cbe-4a60-ac1b-5ce9d555b265"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.840488 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5fb36950-9cbe-4a60-ac1b-5ce9d555b265" (UID: "5fb36950-9cbe-4a60-ac1b-5ce9d555b265"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.876273 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data" (OuterVolumeSpecName: "config-data") pod "5fb36950-9cbe-4a60-ac1b-5ce9d555b265" (UID: "5fb36950-9cbe-4a60-ac1b-5ce9d555b265"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.900065 4916 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.900277 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.900324 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrt8m\" (UniqueName: \"kubernetes.io/projected/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-kube-api-access-wrt8m\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.900338 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fb36950-9cbe-4a60-ac1b-5ce9d555b265-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:43 crc kubenswrapper[4916]: I1203 19:50:43.922839 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.001704 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-api-c989dd47c-7njt7"] Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.010979 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-api-c989dd47c-7njt7"] Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.102388 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data-custom\") pod \"b79a775e-2204-42bd-9679-e95e4843b91f\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.102609 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-combined-ca-bundle\") pod \"b79a775e-2204-42bd-9679-e95e4843b91f\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.102672 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data\") pod \"b79a775e-2204-42bd-9679-e95e4843b91f\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.102708 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-478gs\" (UniqueName: \"kubernetes.io/projected/b79a775e-2204-42bd-9679-e95e4843b91f-kube-api-access-478gs\") pod \"b79a775e-2204-42bd-9679-e95e4843b91f\" (UID: \"b79a775e-2204-42bd-9679-e95e4843b91f\") " Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.110908 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b79a775e-2204-42bd-9679-e95e4843b91f-kube-api-access-478gs" (OuterVolumeSpecName: "kube-api-access-478gs") pod "b79a775e-2204-42bd-9679-e95e4843b91f" (UID: "b79a775e-2204-42bd-9679-e95e4843b91f"). InnerVolumeSpecName "kube-api-access-478gs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.111969 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b79a775e-2204-42bd-9679-e95e4843b91f" (UID: "b79a775e-2204-42bd-9679-e95e4843b91f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.138384 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b79a775e-2204-42bd-9679-e95e4843b91f" (UID: "b79a775e-2204-42bd-9679-e95e4843b91f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.163712 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data" (OuterVolumeSpecName: "config-data") pod "b79a775e-2204-42bd-9679-e95e4843b91f" (UID: "b79a775e-2204-42bd-9679-e95e4843b91f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.205117 4916 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.205156 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.205166 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b79a775e-2204-42bd-9679-e95e4843b91f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.205175 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-478gs\" (UniqueName: \"kubernetes.io/projected/b79a775e-2204-42bd-9679-e95e4843b91f-kube-api-access-478gs\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.436629 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.497960 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" path="/var/lib/kubelet/pods/5fb36950-9cbe-4a60-ac1b-5ce9d555b265/volumes" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.678125 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" event={"ID":"b79a775e-2204-42bd-9679-e95e4843b91f","Type":"ContainerDied","Data":"e003bfed3d6d50289e8ba310f0f5600391c1503db610beef01b4d82a449e839b"} Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.678459 4916 scope.go:117] "RemoveContainer" containerID="337b1ff6a116f4bcd3f5c6b0dbb302c7ffed021d7e044b8f298e057fc4b93aab" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.678342 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-644d4d84cb-x7l99" Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.763541 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-644d4d84cb-x7l99"] Dec 03 19:50:44 crc kubenswrapper[4916]: I1203 19:50:44.770144 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-644d4d84cb-x7l99"] Dec 03 19:50:45 crc kubenswrapper[4916]: I1203 19:50:45.704643 4916 generic.go:334] "Generic (PLEG): container finished" podID="8d15f4cc-451e-4898-9125-a2ad4f229e3d" containerID="52ac206e19a957b4b35869ab6f3dd919d94e86246741828c8bb387e27574fa3a" exitCode=0 Dec 03 19:50:45 crc kubenswrapper[4916]: I1203 19:50:45.704884 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d15f4cc-451e-4898-9125-a2ad4f229e3d","Type":"ContainerDied","Data":"52ac206e19a957b4b35869ab6f3dd919d94e86246741828c8bb387e27574fa3a"} Dec 03 19:50:46 crc kubenswrapper[4916]: I1203 19:50:46.158388 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:50:46 crc kubenswrapper[4916]: I1203 19:50:46.158439 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:50:46 crc kubenswrapper[4916]: I1203 19:50:46.497361 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b79a775e-2204-42bd-9679-e95e4843b91f" path="/var/lib/kubelet/pods/b79a775e-2204-42bd-9679-e95e4843b91f/volumes" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.123627 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-7b767dc896-5v8nl" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.200977 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-6dd8857784-86hhw"] Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.201336 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/heat-engine-6dd8857784-86hhw" podUID="3743884c-79ea-47d6-ad97-92d235fd5a98" containerName="heat-engine" containerID="cri-o://2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8" gracePeriod=60 Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.726305 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.762169 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8d15f4cc-451e-4898-9125-a2ad4f229e3d","Type":"ContainerDied","Data":"a06a0e3cb762004a595c95fee45c0c7b106948b82042f44cb0db525dd84e72bf"} Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.762194 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.762237 4916 scope.go:117] "RemoveContainer" containerID="52ac206e19a957b4b35869ab6f3dd919d94e86246741828c8bb387e27574fa3a" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.765498 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5vk6b" event={"ID":"05ab20e9-de0e-4f40-aa4b-a3b685fe9712","Type":"ContainerStarted","Data":"f8c194e4ca14af6ad3afab72bc55bbf94a95a6bf2536ce315003c0e2ece66735"} Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.795753 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-5vk6b" podStartSLOduration=1.9538471259999999 podStartE2EDuration="10.795733754s" podCreationTimestamp="2025-12-03 19:50:39 +0000 UTC" firstStartedPulling="2025-12-03 19:50:40.53631449 +0000 UTC m=+1256.499124756" lastFinishedPulling="2025-12-03 19:50:49.378201118 +0000 UTC m=+1265.341011384" observedRunningTime="2025-12-03 19:50:49.787921985 +0000 UTC m=+1265.750732241" watchObservedRunningTime="2025-12-03 19:50:49.795733754 +0000 UTC m=+1265.758544020" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.800895 4916 scope.go:117] "RemoveContainer" containerID="2c5d60658a726176a4518d7247e35b98c1e3353c2719e48484db0fe06770760d" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.916250 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.916312 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-config-data\") pod \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.916339 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-internal-tls-certs\") pod \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.916379 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-logs\") pod \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.916396 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kt6ml\" (UniqueName: \"kubernetes.io/projected/8d15f4cc-451e-4898-9125-a2ad4f229e3d-kube-api-access-kt6ml\") pod \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.916500 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-httpd-run\") pod \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.916522 4916 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-scripts\") pod \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.916632 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-combined-ca-bundle\") pod \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\" (UID: \"8d15f4cc-451e-4898-9125-a2ad4f229e3d\") " Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.916783 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-logs" (OuterVolumeSpecName: "logs") pod "8d15f4cc-451e-4898-9125-a2ad4f229e3d" (UID: "8d15f4cc-451e-4898-9125-a2ad4f229e3d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.916966 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8d15f4cc-451e-4898-9125-a2ad4f229e3d" (UID: "8d15f4cc-451e-4898-9125-a2ad4f229e3d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.917031 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-logs\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.923826 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d15f4cc-451e-4898-9125-a2ad4f229e3d-kube-api-access-kt6ml" (OuterVolumeSpecName: "kube-api-access-kt6ml") pod "8d15f4cc-451e-4898-9125-a2ad4f229e3d" (UID: "8d15f4cc-451e-4898-9125-a2ad4f229e3d"). InnerVolumeSpecName "kube-api-access-kt6ml". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.923833 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "8d15f4cc-451e-4898-9125-a2ad4f229e3d" (UID: "8d15f4cc-451e-4898-9125-a2ad4f229e3d"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.924787 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-scripts" (OuterVolumeSpecName: "scripts") pod "8d15f4cc-451e-4898-9125-a2ad4f229e3d" (UID: "8d15f4cc-451e-4898-9125-a2ad4f229e3d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.945966 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d15f4cc-451e-4898-9125-a2ad4f229e3d" (UID: "8d15f4cc-451e-4898-9125-a2ad4f229e3d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.965878 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-config-data" (OuterVolumeSpecName: "config-data") pod "8d15f4cc-451e-4898-9125-a2ad4f229e3d" (UID: "8d15f4cc-451e-4898-9125-a2ad4f229e3d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:49 crc kubenswrapper[4916]: I1203 19:50:49.966435 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8d15f4cc-451e-4898-9125-a2ad4f229e3d" (UID: "8d15f4cc-451e-4898-9125-a2ad4f229e3d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.019067 4916 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8d15f4cc-451e-4898-9125-a2ad4f229e3d-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.019191 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.019274 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.019355 4916 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.019420 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.019473 4916 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d15f4cc-451e-4898-9125-a2ad4f229e3d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.019525 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kt6ml\" (UniqueName: \"kubernetes.io/projected/8d15f4cc-451e-4898-9125-a2ad4f229e3d-kube-api-access-kt6ml\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.021387 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.022109 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.040005 4916 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.054289 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/glance-default-external-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.060821 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.120993 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.121956 4916 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.129188 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.151973 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:50:50 crc kubenswrapper[4916]: E1203 19:50:50.152445 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" containerName="heat-api" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152468 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" containerName="heat-api" Dec 03 19:50:50 crc kubenswrapper[4916]: E1203 19:50:50.152501 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" containerName="heat-api" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152512 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" containerName="heat-api" Dec 03 19:50:50 crc kubenswrapper[4916]: E1203 19:50:50.152522 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b79a775e-2204-42bd-9679-e95e4843b91f" containerName="heat-cfnapi" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152528 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b79a775e-2204-42bd-9679-e95e4843b91f" containerName="heat-cfnapi" Dec 03 19:50:50 crc kubenswrapper[4916]: E1203 19:50:50.152541 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b79a775e-2204-42bd-9679-e95e4843b91f" containerName="heat-cfnapi" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152547 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b79a775e-2204-42bd-9679-e95e4843b91f" containerName="heat-cfnapi" Dec 03 19:50:50 crc kubenswrapper[4916]: E1203 19:50:50.152579 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d15f4cc-451e-4898-9125-a2ad4f229e3d" containerName="glance-httpd" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152588 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d15f4cc-451e-4898-9125-a2ad4f229e3d" containerName="glance-httpd" Dec 03 19:50:50 crc kubenswrapper[4916]: E1203 19:50:50.152603 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d15f4cc-451e-4898-9125-a2ad4f229e3d" containerName="glance-log" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152609 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d15f4cc-451e-4898-9125-a2ad4f229e3d" containerName="glance-log" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152779 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d15f4cc-451e-4898-9125-a2ad4f229e3d" containerName="glance-httpd" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152793 4916 
memory_manager.go:354] "RemoveStaleState removing state" podUID="8d15f4cc-451e-4898-9125-a2ad4f229e3d" containerName="glance-log" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152809 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" containerName="heat-api" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152820 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fb36950-9cbe-4a60-ac1b-5ce9d555b265" containerName="heat-api" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152830 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="b79a775e-2204-42bd-9679-e95e4843b91f" containerName="heat-cfnapi" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.152838 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="b79a775e-2204-42bd-9679-e95e4843b91f" containerName="heat-cfnapi" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.153815 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.157611 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.157942 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.159217 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.325835 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.326215 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65acea52-6e4e-44c7-9406-bc296db6821b-logs\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.326326 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.326405 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.326522 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/65acea52-6e4e-44c7-9406-bc296db6821b-httpd-run\") pod \"glance-default-internal-api-0\" 
(UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.326619 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.326700 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.326789 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fb95\" (UniqueName: \"kubernetes.io/projected/65acea52-6e4e-44c7-9406-bc296db6821b-kube-api-access-5fb95\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.428555 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.428669 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.428738 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/65acea52-6e4e-44c7-9406-bc296db6821b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.428759 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.428779 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.428818 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fb95\" (UniqueName: \"kubernetes.io/projected/65acea52-6e4e-44c7-9406-bc296db6821b-kube-api-access-5fb95\") pod \"glance-default-internal-api-0\" (UID: 
\"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.428862 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.428881 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65acea52-6e4e-44c7-9406-bc296db6821b-logs\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.429875 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.430618 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/65acea52-6e4e-44c7-9406-bc296db6821b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.431267 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65acea52-6e4e-44c7-9406-bc296db6821b-logs\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.435161 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.435253 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.436445 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.441013 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65acea52-6e4e-44c7-9406-bc296db6821b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc 
kubenswrapper[4916]: I1203 19:50:50.450002 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fb95\" (UniqueName: \"kubernetes.io/projected/65acea52-6e4e-44c7-9406-bc296db6821b-kube-api-access-5fb95\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.469661 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"65acea52-6e4e-44c7-9406-bc296db6821b\") " pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.474686 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.500479 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d15f4cc-451e-4898-9125-a2ad4f229e3d" path="/var/lib/kubelet/pods/8d15f4cc-451e-4898-9125-a2ad4f229e3d/volumes" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.774268 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 19:50:50 crc kubenswrapper[4916]: I1203 19:50:50.774627 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 19:50:51 crc kubenswrapper[4916]: I1203 19:50:51.073738 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 19:50:51 crc kubenswrapper[4916]: I1203 19:50:51.784664 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"65acea52-6e4e-44c7-9406-bc296db6821b","Type":"ContainerStarted","Data":"81374ee2bf11effdbdea7e7ec91ed035edc9e589066f3a6caa320095eafdfbae"} Dec 03 19:50:51 crc kubenswrapper[4916]: I1203 19:50:51.784960 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"65acea52-6e4e-44c7-9406-bc296db6821b","Type":"ContainerStarted","Data":"b86e96e14c8aa20305b5782efb9d1bd86af51982b98c6d9708964ecc8ffa875b"} Dec 03 19:50:52 crc kubenswrapper[4916]: E1203 19:50:52.449139 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 03 19:50:52 crc kubenswrapper[4916]: E1203 19:50:52.450665 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 03 19:50:52 crc kubenswrapper[4916]: E1203 19:50:52.451752 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8" cmd=["/usr/bin/pgrep","-r","DRST","heat-engine"] Dec 03 19:50:52 crc kubenswrapper[4916]: E1203 19:50:52.451790 
4916 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/heat-engine-6dd8857784-86hhw" podUID="3743884c-79ea-47d6-ad97-92d235fd5a98" containerName="heat-engine" Dec 03 19:50:52 crc kubenswrapper[4916]: I1203 19:50:52.793311 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"65acea52-6e4e-44c7-9406-bc296db6821b","Type":"ContainerStarted","Data":"fd782beef721c460a2a1dddab1caf5ce73fcbf021bd08efb890dd993ca06aad9"} Dec 03 19:50:52 crc kubenswrapper[4916]: I1203 19:50:52.793331 4916 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 19:50:52 crc kubenswrapper[4916]: I1203 19:50:52.793446 4916 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 19:50:52 crc kubenswrapper[4916]: I1203 19:50:52.817495 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.817474997 podStartE2EDuration="2.817474997s" podCreationTimestamp="2025-12-03 19:50:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:50:52.810181442 +0000 UTC m=+1268.772991708" watchObservedRunningTime="2025-12-03 19:50:52.817474997 +0000 UTC m=+1268.780285263" Dec 03 19:50:52 crc kubenswrapper[4916]: I1203 19:50:52.893168 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 19:50:52 crc kubenswrapper[4916]: I1203 19:50:52.898979 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.583415 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6dd8857784-86hhw" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.627626 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qcrt\" (UniqueName: \"kubernetes.io/projected/3743884c-79ea-47d6-ad97-92d235fd5a98-kube-api-access-8qcrt\") pod \"3743884c-79ea-47d6-ad97-92d235fd5a98\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.627689 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-combined-ca-bundle\") pod \"3743884c-79ea-47d6-ad97-92d235fd5a98\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.627715 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data\") pod \"3743884c-79ea-47d6-ad97-92d235fd5a98\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.627758 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data-custom\") pod \"3743884c-79ea-47d6-ad97-92d235fd5a98\" (UID: \"3743884c-79ea-47d6-ad97-92d235fd5a98\") " Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.633504 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3743884c-79ea-47d6-ad97-92d235fd5a98" (UID: "3743884c-79ea-47d6-ad97-92d235fd5a98"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.644489 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3743884c-79ea-47d6-ad97-92d235fd5a98-kube-api-access-8qcrt" (OuterVolumeSpecName: "kube-api-access-8qcrt") pod "3743884c-79ea-47d6-ad97-92d235fd5a98" (UID: "3743884c-79ea-47d6-ad97-92d235fd5a98"). InnerVolumeSpecName "kube-api-access-8qcrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.675530 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3743884c-79ea-47d6-ad97-92d235fd5a98" (UID: "3743884c-79ea-47d6-ad97-92d235fd5a98"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.695273 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data" (OuterVolumeSpecName: "config-data") pod "3743884c-79ea-47d6-ad97-92d235fd5a98" (UID: "3743884c-79ea-47d6-ad97-92d235fd5a98"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.730821 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qcrt\" (UniqueName: \"kubernetes.io/projected/3743884c-79ea-47d6-ad97-92d235fd5a98-kube-api-access-8qcrt\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.730857 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.730901 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.730913 4916 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3743884c-79ea-47d6-ad97-92d235fd5a98-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.823208 4916 generic.go:334] "Generic (PLEG): container finished" podID="3743884c-79ea-47d6-ad97-92d235fd5a98" containerID="2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8" exitCode=0 Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.823248 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6dd8857784-86hhw" event={"ID":"3743884c-79ea-47d6-ad97-92d235fd5a98","Type":"ContainerDied","Data":"2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8"} Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.823272 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-6dd8857784-86hhw" event={"ID":"3743884c-79ea-47d6-ad97-92d235fd5a98","Type":"ContainerDied","Data":"a0c5228f04981a6d2fb841b0b325fc01b8729828d87c948c6a28a800530aa858"} Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.823287 4916 scope.go:117] "RemoveContainer" containerID="2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.823381 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-6dd8857784-86hhw" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.864645 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-engine-6dd8857784-86hhw"] Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.868403 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-engine-6dd8857784-86hhw"] Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.872718 4916 scope.go:117] "RemoveContainer" containerID="2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8" Dec 03 19:50:55 crc kubenswrapper[4916]: E1203 19:50:55.873165 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8\": container with ID starting with 2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8 not found: ID does not exist" containerID="2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8" Dec 03 19:50:55 crc kubenswrapper[4916]: I1203 19:50:55.873194 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8"} err="failed to get container status \"2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8\": rpc error: code = NotFound desc = could not find container \"2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8\": container with ID starting with 2d3ae4a31ce4c50be5a9b898f59a36c7cea4621077335accb7db61bfa4ac7fd8 not found: ID does not exist" Dec 03 19:50:56 crc kubenswrapper[4916]: I1203 19:50:56.493506 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3743884c-79ea-47d6-ad97-92d235fd5a98" path="/var/lib/kubelet/pods/3743884c-79ea-47d6-ad97-92d235fd5a98/volumes" Dec 03 19:50:58 crc kubenswrapper[4916]: I1203 19:50:58.505895 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 19:51:00 crc kubenswrapper[4916]: I1203 19:51:00.475682 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 19:51:00 crc kubenswrapper[4916]: I1203 19:51:00.475777 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 19:51:00 crc kubenswrapper[4916]: I1203 19:51:00.515863 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 19:51:00 crc kubenswrapper[4916]: I1203 19:51:00.541321 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 19:51:00 crc kubenswrapper[4916]: I1203 19:51:00.876156 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 19:51:00 crc kubenswrapper[4916]: I1203 19:51:00.876223 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 19:51:01 crc kubenswrapper[4916]: I1203 19:51:01.884854 4916 generic.go:334] "Generic (PLEG): container finished" podID="05ab20e9-de0e-4f40-aa4b-a3b685fe9712" containerID="f8c194e4ca14af6ad3afab72bc55bbf94a95a6bf2536ce315003c0e2ece66735" exitCode=0 Dec 03 19:51:01 crc 
kubenswrapper[4916]: I1203 19:51:01.884976 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5vk6b" event={"ID":"05ab20e9-de0e-4f40-aa4b-a3b685fe9712","Type":"ContainerDied","Data":"f8c194e4ca14af6ad3afab72bc55bbf94a95a6bf2536ce315003c0e2ece66735"} Dec 03 19:51:02 crc kubenswrapper[4916]: I1203 19:51:02.773887 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 19:51:02 crc kubenswrapper[4916]: I1203 19:51:02.777740 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.304528 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.483635 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwqzt\" (UniqueName: \"kubernetes.io/projected/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-kube-api-access-mwqzt\") pod \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.483774 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-scripts\") pod \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.483882 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-config-data\") pod \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.483956 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-combined-ca-bundle\") pod \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\" (UID: \"05ab20e9-de0e-4f40-aa4b-a3b685fe9712\") " Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.489235 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-scripts" (OuterVolumeSpecName: "scripts") pod "05ab20e9-de0e-4f40-aa4b-a3b685fe9712" (UID: "05ab20e9-de0e-4f40-aa4b-a3b685fe9712"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.489575 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-kube-api-access-mwqzt" (OuterVolumeSpecName: "kube-api-access-mwqzt") pod "05ab20e9-de0e-4f40-aa4b-a3b685fe9712" (UID: "05ab20e9-de0e-4f40-aa4b-a3b685fe9712"). InnerVolumeSpecName "kube-api-access-mwqzt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.520906 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05ab20e9-de0e-4f40-aa4b-a3b685fe9712" (UID: "05ab20e9-de0e-4f40-aa4b-a3b685fe9712"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.536659 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-config-data" (OuterVolumeSpecName: "config-data") pod "05ab20e9-de0e-4f40-aa4b-a3b685fe9712" (UID: "05ab20e9-de0e-4f40-aa4b-a3b685fe9712"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.586345 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.586387 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwqzt\" (UniqueName: \"kubernetes.io/projected/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-kube-api-access-mwqzt\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.586400 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.586409 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05ab20e9-de0e-4f40-aa4b-a3b685fe9712-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.903586 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-5vk6b" event={"ID":"05ab20e9-de0e-4f40-aa4b-a3b685fe9712","Type":"ContainerDied","Data":"e76725df7cf54bea7dc0a3eb9b7b159667f504b30386455dc53af766cb9e1a8c"} Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.903609 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-5vk6b" Dec 03 19:51:03 crc kubenswrapper[4916]: I1203 19:51:03.903628 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e76725df7cf54bea7dc0a3eb9b7b159667f504b30386455dc53af766cb9e1a8c" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.029942 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 19:51:04 crc kubenswrapper[4916]: E1203 19:51:04.030328 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05ab20e9-de0e-4f40-aa4b-a3b685fe9712" containerName="nova-cell0-conductor-db-sync" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.030345 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="05ab20e9-de0e-4f40-aa4b-a3b685fe9712" containerName="nova-cell0-conductor-db-sync" Dec 03 19:51:04 crc kubenswrapper[4916]: E1203 19:51:04.030372 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3743884c-79ea-47d6-ad97-92d235fd5a98" containerName="heat-engine" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.030380 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="3743884c-79ea-47d6-ad97-92d235fd5a98" containerName="heat-engine" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.030557 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="3743884c-79ea-47d6-ad97-92d235fd5a98" containerName="heat-engine" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.030592 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="05ab20e9-de0e-4f40-aa4b-a3b685fe9712" containerName="nova-cell0-conductor-db-sync" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.031179 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.032945 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-r5cfq" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.033609 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.042494 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.197042 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5a290e9-1938-4d33-a6b5-f7490d7a6bcc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a5a290e9-1938-4d33-a6b5-f7490d7a6bcc\") " pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.197787 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qj5dx\" (UniqueName: \"kubernetes.io/projected/a5a290e9-1938-4d33-a6b5-f7490d7a6bcc-kube-api-access-qj5dx\") pod \"nova-cell0-conductor-0\" (UID: \"a5a290e9-1938-4d33-a6b5-f7490d7a6bcc\") " pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.197951 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5a290e9-1938-4d33-a6b5-f7490d7a6bcc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a5a290e9-1938-4d33-a6b5-f7490d7a6bcc\") " pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.300385 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5a290e9-1938-4d33-a6b5-f7490d7a6bcc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a5a290e9-1938-4d33-a6b5-f7490d7a6bcc\") " pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.300928 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5a290e9-1938-4d33-a6b5-f7490d7a6bcc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a5a290e9-1938-4d33-a6b5-f7490d7a6bcc\") " pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.301145 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qj5dx\" (UniqueName: \"kubernetes.io/projected/a5a290e9-1938-4d33-a6b5-f7490d7a6bcc-kube-api-access-qj5dx\") pod \"nova-cell0-conductor-0\" (UID: \"a5a290e9-1938-4d33-a6b5-f7490d7a6bcc\") " pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.306220 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5a290e9-1938-4d33-a6b5-f7490d7a6bcc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a5a290e9-1938-4d33-a6b5-f7490d7a6bcc\") " pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.309529 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5a290e9-1938-4d33-a6b5-f7490d7a6bcc-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"a5a290e9-1938-4d33-a6b5-f7490d7a6bcc\") " pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.332971 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qj5dx\" (UniqueName: \"kubernetes.io/projected/a5a290e9-1938-4d33-a6b5-f7490d7a6bcc-kube-api-access-qj5dx\") pod \"nova-cell0-conductor-0\" (UID: \"a5a290e9-1938-4d33-a6b5-f7490d7a6bcc\") " pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.354066 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.850118 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 19:51:04 crc kubenswrapper[4916]: I1203 19:51:04.914422 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a5a290e9-1938-4d33-a6b5-f7490d7a6bcc","Type":"ContainerStarted","Data":"ccaab357efeedb7610ea889c28470fdcd8d095027437e8ffe6a77866d84ab21a"} Dec 03 19:51:05 crc kubenswrapper[4916]: I1203 19:51:05.927705 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a5a290e9-1938-4d33-a6b5-f7490d7a6bcc","Type":"ContainerStarted","Data":"7baa579b646d827d354105ff0737969232fb4121eb65f4e067556f78af5b9849"} Dec 03 19:51:05 crc kubenswrapper[4916]: I1203 19:51:05.928134 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:05 crc kubenswrapper[4916]: I1203 19:51:05.954718 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.954695598 podStartE2EDuration="2.954695598s" podCreationTimestamp="2025-12-03 19:51:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:51:05.953423934 +0000 UTC m=+1281.916234250" watchObservedRunningTime="2025-12-03 19:51:05.954695598 +0000 UTC m=+1281.917505894" Dec 03 19:51:07 crc kubenswrapper[4916]: I1203 19:51:07.982175 4916 generic.go:334] "Generic (PLEG): container finished" podID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerID="fe07e7f9b2713593652e751d1206e7db8b7d636fe7f6965bd9c7283f887dfe60" exitCode=137 Dec 03 19:51:07 crc kubenswrapper[4916]: I1203 19:51:07.982416 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ef40a4d-a930-428c-a816-ad1afa6d6c04","Type":"ContainerDied","Data":"fe07e7f9b2713593652e751d1206e7db8b7d636fe7f6965bd9c7283f887dfe60"} Dec 03 19:51:07 crc kubenswrapper[4916]: I1203 19:51:07.983134 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8ef40a4d-a930-428c-a816-ad1afa6d6c04","Type":"ContainerDied","Data":"3549d6fdade644f51202dbee95f18ed0766c322b7c0bffde5e170d8d7a7710b4"} Dec 03 19:51:07 crc kubenswrapper[4916]: I1203 19:51:07.983150 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3549d6fdade644f51202dbee95f18ed0766c322b7c0bffde5e170d8d7a7710b4" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.045791 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.180897 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-run-httpd\") pod \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.180947 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-sg-core-conf-yaml\") pod \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.180980 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-scripts\") pod \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.181106 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-log-httpd\") pod \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.181145 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqv87\" (UniqueName: \"kubernetes.io/projected/8ef40a4d-a930-428c-a816-ad1afa6d6c04-kube-api-access-tqv87\") pod \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.181193 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-config-data\") pod \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.181241 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-combined-ca-bundle\") pod \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\" (UID: \"8ef40a4d-a930-428c-a816-ad1afa6d6c04\") " Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.181774 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8ef40a4d-a930-428c-a816-ad1afa6d6c04" (UID: "8ef40a4d-a930-428c-a816-ad1afa6d6c04"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.181788 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8ef40a4d-a930-428c-a816-ad1afa6d6c04" (UID: "8ef40a4d-a930-428c-a816-ad1afa6d6c04"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.187241 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ef40a4d-a930-428c-a816-ad1afa6d6c04-kube-api-access-tqv87" (OuterVolumeSpecName: "kube-api-access-tqv87") pod "8ef40a4d-a930-428c-a816-ad1afa6d6c04" (UID: "8ef40a4d-a930-428c-a816-ad1afa6d6c04"). InnerVolumeSpecName "kube-api-access-tqv87". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.190698 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-scripts" (OuterVolumeSpecName: "scripts") pod "8ef40a4d-a930-428c-a816-ad1afa6d6c04" (UID: "8ef40a4d-a930-428c-a816-ad1afa6d6c04"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.216945 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8ef40a4d-a930-428c-a816-ad1afa6d6c04" (UID: "8ef40a4d-a930-428c-a816-ad1afa6d6c04"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.284404 4916 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.284876 4916 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.284897 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.284914 4916 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef40a4d-a930-428c-a816-ad1afa6d6c04-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.284931 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqv87\" (UniqueName: \"kubernetes.io/projected/8ef40a4d-a930-428c-a816-ad1afa6d6c04-kube-api-access-tqv87\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.295607 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-config-data" (OuterVolumeSpecName: "config-data") pod "8ef40a4d-a930-428c-a816-ad1afa6d6c04" (UID: "8ef40a4d-a930-428c-a816-ad1afa6d6c04"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.316313 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ef40a4d-a930-428c-a816-ad1afa6d6c04" (UID: "8ef40a4d-a930-428c-a816-ad1afa6d6c04"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.387065 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.387110 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef40a4d-a930-428c-a816-ad1afa6d6c04-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:08 crc kubenswrapper[4916]: I1203 19:51:08.994288 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.026703 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.039584 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.083781 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:09 crc kubenswrapper[4916]: E1203 19:51:09.084249 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="ceilometer-notification-agent" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.084277 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="ceilometer-notification-agent" Dec 03 19:51:09 crc kubenswrapper[4916]: E1203 19:51:09.084291 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="ceilometer-central-agent" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.084301 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="ceilometer-central-agent" Dec 03 19:51:09 crc kubenswrapper[4916]: E1203 19:51:09.084326 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="proxy-httpd" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.084337 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="proxy-httpd" Dec 03 19:51:09 crc kubenswrapper[4916]: E1203 19:51:09.084396 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="sg-core" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.084423 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="sg-core" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.084749 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="sg-core" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.084784 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="ceilometer-notification-agent" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.084817 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="proxy-httpd" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.084829 4916 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" containerName="ceilometer-central-agent" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.087028 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.089613 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.090667 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.101480 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.204469 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-scripts\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.204754 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcghj\" (UniqueName: \"kubernetes.io/projected/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-kube-api-access-bcghj\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.204865 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-run-httpd\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.204943 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.205054 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-config-data\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.205188 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.205340 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-log-httpd\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.307349 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.307493 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-log-httpd\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.307618 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-scripts\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.307926 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcghj\" (UniqueName: \"kubernetes.io/projected/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-kube-api-access-bcghj\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.308291 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-run-httpd\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.308360 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.308428 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-config-data\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.308818 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-log-httpd\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.312697 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.312724 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-run-httpd\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.314439 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-config-data\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.315054 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.320660 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-scripts\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.339173 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcghj\" (UniqueName: \"kubernetes.io/projected/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-kube-api-access-bcghj\") pod \"ceilometer-0\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.424333 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:09 crc kubenswrapper[4916]: I1203 19:51:09.965317 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:10 crc kubenswrapper[4916]: I1203 19:51:10.003276 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1c749c4-0fe3-43e0-b6b7-2665fb34487d","Type":"ContainerStarted","Data":"de39ee0a56c0352514bcce2d61b77014ceb7317679d561d9c893c8a668449d41"} Dec 03 19:51:10 crc kubenswrapper[4916]: I1203 19:51:10.497232 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ef40a4d-a930-428c-a816-ad1afa6d6c04" path="/var/lib/kubelet/pods/8ef40a4d-a930-428c-a816-ad1afa6d6c04/volumes" Dec 03 19:51:11 crc kubenswrapper[4916]: I1203 19:51:11.013167 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1c749c4-0fe3-43e0-b6b7-2665fb34487d","Type":"ContainerStarted","Data":"7890dd1aa45019999238a08469d847a658b62dcc2791035d3c23e7b940b7d454"} Dec 03 19:51:12 crc kubenswrapper[4916]: I1203 19:51:12.024143 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1c749c4-0fe3-43e0-b6b7-2665fb34487d","Type":"ContainerStarted","Data":"31fbaf62b8c02196c57a896d5380ed9b70c60b372eb9d588cf2e51d98523cae2"} Dec 03 19:51:12 crc kubenswrapper[4916]: I1203 19:51:12.871313 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:13 crc kubenswrapper[4916]: I1203 19:51:13.039852 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1c749c4-0fe3-43e0-b6b7-2665fb34487d","Type":"ContainerStarted","Data":"e2e05f7189350bea03548e8f77d41d26ab844167ea353142861825f749b8dae1"} Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.060824 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1c749c4-0fe3-43e0-b6b7-2665fb34487d","Type":"ContainerStarted","Data":"34d6bb7837af73a6c05714d08eb9f5ff8b6d104d1f28079359de0fd5f039de62"} Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.061177 4916 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openstack/ceilometer-0" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="ceilometer-central-agent" containerID="cri-o://7890dd1aa45019999238a08469d847a658b62dcc2791035d3c23e7b940b7d454" gracePeriod=30 Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.061242 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.061295 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="proxy-httpd" containerID="cri-o://34d6bb7837af73a6c05714d08eb9f5ff8b6d104d1f28079359de0fd5f039de62" gracePeriod=30 Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.061341 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="sg-core" containerID="cri-o://e2e05f7189350bea03548e8f77d41d26ab844167ea353142861825f749b8dae1" gracePeriod=30 Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.061391 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="ceilometer-notification-agent" containerID="cri-o://31fbaf62b8c02196c57a896d5380ed9b70c60b372eb9d588cf2e51d98523cae2" gracePeriod=30 Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.097382 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.261278503 podStartE2EDuration="5.097356835s" podCreationTimestamp="2025-12-03 19:51:09 +0000 UTC" firstStartedPulling="2025-12-03 19:51:09.964705732 +0000 UTC m=+1285.927515998" lastFinishedPulling="2025-12-03 19:51:13.800784034 +0000 UTC m=+1289.763594330" observedRunningTime="2025-12-03 19:51:14.081860521 +0000 UTC m=+1290.044670807" watchObservedRunningTime="2025-12-03 19:51:14.097356835 +0000 UTC m=+1290.060167121" Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.406152 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.898757 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-9vjsv"] Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.900594 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.907053 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.907150 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 03 19:51:14 crc kubenswrapper[4916]: I1203 19:51:14.920299 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-9vjsv"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.021664 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-scripts\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.021737 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.021919 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgmmh\" (UniqueName: \"kubernetes.io/projected/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-kube-api-access-sgmmh\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.022173 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-config-data\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.054275 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.055422 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.059044 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.079858 4916 generic.go:334] "Generic (PLEG): container finished" podID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerID="e2e05f7189350bea03548e8f77d41d26ab844167ea353142861825f749b8dae1" exitCode=2 Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.079893 4916 generic.go:334] "Generic (PLEG): container finished" podID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerID="31fbaf62b8c02196c57a896d5380ed9b70c60b372eb9d588cf2e51d98523cae2" exitCode=0 Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.079913 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1c749c4-0fe3-43e0-b6b7-2665fb34487d","Type":"ContainerDied","Data":"e2e05f7189350bea03548e8f77d41d26ab844167ea353142861825f749b8dae1"} Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.079940 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1c749c4-0fe3-43e0-b6b7-2665fb34487d","Type":"ContainerDied","Data":"31fbaf62b8c02196c57a896d5380ed9b70c60b372eb9d588cf2e51d98523cae2"} Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.109546 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.124112 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-scripts\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.124178 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.124228 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgmmh\" (UniqueName: \"kubernetes.io/projected/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-kube-api-access-sgmmh\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.124303 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-config-data\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.133888 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.139055 4916 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-scripts\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.150201 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-config-data\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.158080 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.159682 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.165610 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.167050 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.182620 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgmmh\" (UniqueName: \"kubernetes.io/projected/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-kube-api-access-sgmmh\") pod \"nova-cell0-cell-mapping-9vjsv\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.216202 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.229865 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.230390 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.231138 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmfkv\" (UniqueName: \"kubernetes.io/projected/59d15586-9ea1-4a06-b563-4acac206caeb-kube-api-access-nmfkv\") pod \"nova-cell1-novncproxy-0\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.291047 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.298017 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.304009 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.330152 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.331815 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.332838 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmfkv\" (UniqueName: \"kubernetes.io/projected/59d15586-9ea1-4a06-b563-4acac206caeb-kube-api-access-nmfkv\") pod \"nova-cell1-novncproxy-0\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.356506 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.363321 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmfkv\" (UniqueName: \"kubernetes.io/projected/59d15586-9ea1-4a06-b563-4acac206caeb-kube-api-access-nmfkv\") pod \"nova-cell1-novncproxy-0\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.364003 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.364055 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zl7n7\" (UniqueName: \"kubernetes.io/projected/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-kube-api-access-zl7n7\") pod \"nova-scheduler-0\" (UID: \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.366479 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.366955 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-config-data\") pod \"nova-scheduler-0\" (UID: \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.367015 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.375323 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.376375 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.384557 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.418558 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.440898 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-9b86998b5-j4z8k"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.442526 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.453453 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9b86998b5-j4z8k"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.468800 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4p8s\" (UniqueName: \"kubernetes.io/projected/c1ba18bf-6eb8-445c-b03c-46c867d80430-kube-api-access-r4p8s\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.468865 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xzhc\" (UniqueName: \"kubernetes.io/projected/e34d9c63-d03a-453c-997e-1e47baa58589-kube-api-access-9xzhc\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.468890 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-swift-storage-0\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.468938 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.468967 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-config-data\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.468992 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zl7n7\" 
(UniqueName: \"kubernetes.io/projected/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-kube-api-access-zl7n7\") pod \"nova-scheduler-0\" (UID: \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.469012 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.469041 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-config\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.469091 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-nb\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.469110 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded59053-6b2a-4d20-bd49-1a444e35ad2f-logs\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.469129 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.469153 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-sb\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.469174 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-config-data\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.469193 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9nnx\" (UniqueName: \"kubernetes.io/projected/ded59053-6b2a-4d20-bd49-1a444e35ad2f-kube-api-access-c9nnx\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.469216 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-config-data\") pod \"nova-scheduler-0\" (UID: 
\"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.469234 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-svc\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.469549 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e34d9c63-d03a-453c-997e-1e47baa58589-logs\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.474403 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.474656 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-config-data\") pod \"nova-scheduler-0\" (UID: \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.483831 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zl7n7\" (UniqueName: \"kubernetes.io/projected/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-kube-api-access-zl7n7\") pod \"nova-scheduler-0\" (UID: \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.571655 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xzhc\" (UniqueName: \"kubernetes.io/projected/e34d9c63-d03a-453c-997e-1e47baa58589-kube-api-access-9xzhc\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.571700 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-swift-storage-0\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.571764 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-config-data\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.571789 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.571818 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-config\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.571867 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-nb\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.571884 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded59053-6b2a-4d20-bd49-1a444e35ad2f-logs\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.571898 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.571913 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-sb\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.572243 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded59053-6b2a-4d20-bd49-1a444e35ad2f-logs\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.572680 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-nb\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.572925 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-swift-storage-0\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.573159 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-sb\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.573577 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-config-data\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 
19:51:15.573694 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-config\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.573769 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9nnx\" (UniqueName: \"kubernetes.io/projected/ded59053-6b2a-4d20-bd49-1a444e35ad2f-kube-api-access-c9nnx\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.573816 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-svc\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.573854 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e34d9c63-d03a-453c-997e-1e47baa58589-logs\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.573903 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4p8s\" (UniqueName: \"kubernetes.io/projected/c1ba18bf-6eb8-445c-b03c-46c867d80430-kube-api-access-r4p8s\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.575163 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-svc\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.575760 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.577041 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-config-data\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.577068 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.578223 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e34d9c63-d03a-453c-997e-1e47baa58589-logs\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 
19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.579203 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-config-data\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.591033 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xzhc\" (UniqueName: \"kubernetes.io/projected/e34d9c63-d03a-453c-997e-1e47baa58589-kube-api-access-9xzhc\") pod \"nova-metadata-0\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.592052 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9nnx\" (UniqueName: \"kubernetes.io/projected/ded59053-6b2a-4d20-bd49-1a444e35ad2f-kube-api-access-c9nnx\") pod \"nova-api-0\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.593245 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4p8s\" (UniqueName: \"kubernetes.io/projected/c1ba18bf-6eb8-445c-b03c-46c867d80430-kube-api-access-r4p8s\") pod \"dnsmasq-dns-9b86998b5-j4z8k\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.650588 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.672945 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.721928 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.734192 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.768588 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.858049 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-9vjsv"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.915006 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-vrzk5"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.942100 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-vrzk5"] Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.942202 4916 util.go:30] "No sandbox for pod can be found. 
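The SyncLoop ADD/UPDATE/DELETE/REMOVE lines scattered through this burst are the kubelet echoing pod watch events from the API server (source="api"); tallying them per pod gives a quick churn summary for a window like this one. A sketch, again against the assumed ./kubelet.log:

    import re
    from collections import Counter

    verbs = Counter()
    pat = re.compile(r'"SyncLoop (ADD|UPDATE|DELETE|REMOVE)" source="api" pods=\["([^"]+)"\]')
    with open("kubelet.log", encoding="utf-8", errors="replace") as f:
        for line in f:
            if (m := pat.search(line)):
                verbs[(m.group(2), m.group(1))] += 1
    for (pod, verb), n in verbs.most_common(10):
        print(f"{n:3d}  {verb:6s} {pod}")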
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.946313 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 03 19:51:15 crc kubenswrapper[4916]: I1203 19:51:15.948265 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.086659 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-config-data\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.086743 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.086795 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-scripts\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.086991 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwgwb\" (UniqueName: \"kubernetes.io/projected/46082ae5-9ed1-46c5-8320-d7477415de04-kube-api-access-bwgwb\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.094909 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9vjsv" event={"ID":"34e53902-e2e3-4757-b7ad-b9ff5431bd8c","Type":"ContainerStarted","Data":"624e08cbb2e3ff19e795f92f3908088e39516e44a38be48e3374dc6d207e9f0c"} Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.138163 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.158874 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.159159 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.188797 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-config-data\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.188841 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.188867 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-scripts\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.188945 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwgwb\" (UniqueName: \"kubernetes.io/projected/46082ae5-9ed1-46c5-8320-d7477415de04-kube-api-access-bwgwb\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.195707 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.196678 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-config-data\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.206640 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-scripts\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.208185 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwgwb\" (UniqueName: \"kubernetes.io/projected/46082ae5-9ed1-46c5-8320-d7477415de04-kube-api-access-bwgwb\") pod \"nova-cell1-conductor-db-sync-vrzk5\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.281912 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.297223 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.441448 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-9b86998b5-j4z8k"] Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.459134 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.531498 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:51:16 crc kubenswrapper[4916]: I1203 19:51:16.788824 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-vrzk5"] Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.246961 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-vrzk5" event={"ID":"46082ae5-9ed1-46c5-8320-d7477415de04","Type":"ContainerStarted","Data":"b543e5ffd3f1a72659a5afbc25a8916c3f6113dc87c22096da4b20b1b42d943f"} Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.247648 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-vrzk5" event={"ID":"46082ae5-9ed1-46c5-8320-d7477415de04","Type":"ContainerStarted","Data":"0e2fe15fccac3e543867ee5d2118158f5fb5938b5abb7af3a0b39d81e25ea79f"} Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.251335 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"59d15586-9ea1-4a06-b563-4acac206caeb","Type":"ContainerStarted","Data":"8575de5f3b4b5e87942106fd14e16205f2375a16d58d94d8dae78d92792ef8b4"} Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.254117 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fd9f1aab-89f3-43d5-a18e-54220c1f05d8","Type":"ContainerStarted","Data":"82bcdb77f8c90722c4a1439e2f4dc04d8825ae7fd5ace5a80a0111470ff7441e"} Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.280442 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ded59053-6b2a-4d20-bd49-1a444e35ad2f","Type":"ContainerStarted","Data":"9dd4be3aed5ae9e805c3fec19ab09494bb3c855bb63be7baf173ce5d923b357a"} Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.303708 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-vrzk5" podStartSLOduration=3.303684919 podStartE2EDuration="3.303684919s" podCreationTimestamp="2025-12-03 19:51:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:51:18.29360882 +0000 UTC m=+1294.256419086" watchObservedRunningTime="2025-12-03 19:51:18.303684919 +0000 UTC m=+1294.266495185" Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.317326 4916 generic.go:334] "Generic (PLEG): container finished" podID="c1ba18bf-6eb8-445c-b03c-46c867d80430" containerID="94204763d6de76050c8b7d0c42aa74ff2688607265a1d16eb3c8a09c9c4ba063" exitCode=0 Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.317435 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" 
event={"ID":"c1ba18bf-6eb8-445c-b03c-46c867d80430","Type":"ContainerDied","Data":"94204763d6de76050c8b7d0c42aa74ff2688607265a1d16eb3c8a09c9c4ba063"} Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.317471 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" event={"ID":"c1ba18bf-6eb8-445c-b03c-46c867d80430","Type":"ContainerStarted","Data":"ebe262875e3c26748ec8689f5146c9963b6fe94bef26a15e7cffee1e17957339"} Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.329631 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9vjsv" event={"ID":"34e53902-e2e3-4757-b7ad-b9ff5431bd8c","Type":"ContainerStarted","Data":"89be2dc057219ac07098b5d3860cdbe776077e2ab2a6120e7572e388f99fbe80"} Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.334692 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e34d9c63-d03a-453c-997e-1e47baa58589","Type":"ContainerStarted","Data":"0bc68f795e7e3dd0d319840f4a90b9e293cdcafb08da1c012a5de40f4d4aaaad"} Dec 03 19:51:18 crc kubenswrapper[4916]: I1203 19:51:18.371735 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-9vjsv" podStartSLOduration=4.371714819 podStartE2EDuration="4.371714819s" podCreationTimestamp="2025-12-03 19:51:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:51:18.353952134 +0000 UTC m=+1294.316762400" watchObservedRunningTime="2025-12-03 19:51:18.371714819 +0000 UTC m=+1294.334525085" Dec 03 19:51:19 crc kubenswrapper[4916]: I1203 19:51:19.216414 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:51:19 crc kubenswrapper[4916]: I1203 19:51:19.228871 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.368612 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" event={"ID":"c1ba18bf-6eb8-445c-b03c-46c867d80430","Type":"ContainerStarted","Data":"897a04a7dec9589a3e29cd25e43cf4d06c573332914d31ade95414f1996cd25b"} Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.368930 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.372011 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e34d9c63-d03a-453c-997e-1e47baa58589","Type":"ContainerStarted","Data":"d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0"} Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.372048 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e34d9c63-d03a-453c-997e-1e47baa58589","Type":"ContainerStarted","Data":"67b923533375b27d732eb895cf7ae25a63a19e77d8647c0ccc2294ad06bc967f"} Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.372099 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e34d9c63-d03a-453c-997e-1e47baa58589" containerName="nova-metadata-metadata" containerID="cri-o://d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0" gracePeriod=30 Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.372120 4916 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-metadata-0" podUID="e34d9c63-d03a-453c-997e-1e47baa58589" containerName="nova-metadata-log" containerID="cri-o://67b923533375b27d732eb895cf7ae25a63a19e77d8647c0ccc2294ad06bc967f" gracePeriod=30 Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.374236 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"59d15586-9ea1-4a06-b563-4acac206caeb","Type":"ContainerStarted","Data":"b06340a6e567e08435a2d1ac3c5c9bb85905d9247d2d117e31360a413c189093"} Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.374354 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="59d15586-9ea1-4a06-b563-4acac206caeb" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://b06340a6e567e08435a2d1ac3c5c9bb85905d9247d2d117e31360a413c189093" gracePeriod=30 Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.379188 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fd9f1aab-89f3-43d5-a18e-54220c1f05d8","Type":"ContainerStarted","Data":"d7e3ef3c5f0f6e7b2a1952b19d8c513b5b71a227840951818c797e135dbf829a"} Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.382491 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ded59053-6b2a-4d20-bd49-1a444e35ad2f","Type":"ContainerStarted","Data":"bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa"} Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.382520 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ded59053-6b2a-4d20-bd49-1a444e35ad2f","Type":"ContainerStarted","Data":"fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf"} Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.387893 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" podStartSLOduration=6.387878022 podStartE2EDuration="6.387878022s" podCreationTimestamp="2025-12-03 19:51:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:51:21.385493619 +0000 UTC m=+1297.348303885" watchObservedRunningTime="2025-12-03 19:51:21.387878022 +0000 UTC m=+1297.350688288" Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.414199 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.973994888 podStartE2EDuration="6.414182376s" podCreationTimestamp="2025-12-03 19:51:15 +0000 UTC" firstStartedPulling="2025-12-03 19:51:16.13763247 +0000 UTC m=+1292.100442746" lastFinishedPulling="2025-12-03 19:51:20.577819978 +0000 UTC m=+1296.540630234" observedRunningTime="2025-12-03 19:51:21.407505717 +0000 UTC m=+1297.370315983" watchObservedRunningTime="2025-12-03 19:51:21.414182376 +0000 UTC m=+1297.376992632" Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.431383 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.31398656 podStartE2EDuration="6.431358185s" podCreationTimestamp="2025-12-03 19:51:15 +0000 UTC" firstStartedPulling="2025-12-03 19:51:16.462085417 +0000 UTC m=+1292.424895683" lastFinishedPulling="2025-12-03 19:51:20.579457042 +0000 UTC m=+1296.542267308" observedRunningTime="2025-12-03 19:51:21.424770169 +0000 UTC m=+1297.387580455" watchObservedRunningTime="2025-12-03 
19:51:21.431358185 +0000 UTC m=+1297.394168451" Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.448968 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.1741154 podStartE2EDuration="6.448950046s" podCreationTimestamp="2025-12-03 19:51:15 +0000 UTC" firstStartedPulling="2025-12-03 19:51:16.303757023 +0000 UTC m=+1292.266567289" lastFinishedPulling="2025-12-03 19:51:20.578591659 +0000 UTC m=+1296.541401935" observedRunningTime="2025-12-03 19:51:21.44089129 +0000 UTC m=+1297.403701566" watchObservedRunningTime="2025-12-03 19:51:21.448950046 +0000 UTC m=+1297.411760312" Dec 03 19:51:21 crc kubenswrapper[4916]: I1203 19:51:21.462326 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.323655549 podStartE2EDuration="6.462307343s" podCreationTimestamp="2025-12-03 19:51:15 +0000 UTC" firstStartedPulling="2025-12-03 19:51:16.454286498 +0000 UTC m=+1292.417096764" lastFinishedPulling="2025-12-03 19:51:20.592938292 +0000 UTC m=+1296.555748558" observedRunningTime="2025-12-03 19:51:21.459068346 +0000 UTC m=+1297.421878632" watchObservedRunningTime="2025-12-03 19:51:21.462307343 +0000 UTC m=+1297.425117609" Dec 03 19:51:22 crc kubenswrapper[4916]: I1203 19:51:22.394277 4916 generic.go:334] "Generic (PLEG): container finished" podID="e34d9c63-d03a-453c-997e-1e47baa58589" containerID="67b923533375b27d732eb895cf7ae25a63a19e77d8647c0ccc2294ad06bc967f" exitCode=143 Dec 03 19:51:22 crc kubenswrapper[4916]: I1203 19:51:22.394362 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e34d9c63-d03a-453c-997e-1e47baa58589","Type":"ContainerDied","Data":"67b923533375b27d732eb895cf7ae25a63a19e77d8647c0ccc2294ad06bc967f"} Dec 03 19:51:23 crc kubenswrapper[4916]: I1203 19:51:23.408309 4916 generic.go:334] "Generic (PLEG): container finished" podID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerID="7890dd1aa45019999238a08469d847a658b62dcc2791035d3c23e7b940b7d454" exitCode=0 Dec 03 19:51:23 crc kubenswrapper[4916]: I1203 19:51:23.408386 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1c749c4-0fe3-43e0-b6b7-2665fb34487d","Type":"ContainerDied","Data":"7890dd1aa45019999238a08469d847a658b62dcc2791035d3c23e7b940b7d454"} Dec 03 19:51:24 crc kubenswrapper[4916]: I1203 19:51:24.422388 4916 generic.go:334] "Generic (PLEG): container finished" podID="34e53902-e2e3-4757-b7ad-b9ff5431bd8c" containerID="89be2dc057219ac07098b5d3860cdbe776077e2ab2a6120e7572e388f99fbe80" exitCode=0 Dec 03 19:51:24 crc kubenswrapper[4916]: I1203 19:51:24.422456 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9vjsv" event={"ID":"34e53902-e2e3-4757-b7ad-b9ff5431bd8c","Type":"ContainerDied","Data":"89be2dc057219ac07098b5d3860cdbe776077e2ab2a6120e7572e388f99fbe80"} Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.436143 4916 generic.go:334] "Generic (PLEG): container finished" podID="46082ae5-9ed1-46c5-8320-d7477415de04" containerID="b543e5ffd3f1a72659a5afbc25a8916c3f6113dc87c22096da4b20b1b42d943f" exitCode=0 Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.436269 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-vrzk5" event={"ID":"46082ae5-9ed1-46c5-8320-d7477415de04","Type":"ContainerDied","Data":"b543e5ffd3f1a72659a5afbc25a8916c3f6113dc87c22096da4b20b1b42d943f"} Dec 03 19:51:25 crc 
kubenswrapper[4916]: I1203 19:51:25.651239 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.651317 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.674317 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.704208 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.722931 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.723045 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.735507 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.735555 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.880921 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.919125 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-combined-ca-bundle\") pod \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.919214 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-config-data\") pod \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.919361 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgmmh\" (UniqueName: \"kubernetes.io/projected/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-kube-api-access-sgmmh\") pod \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.919425 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-scripts\") pod \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\" (UID: \"34e53902-e2e3-4757-b7ad-b9ff5431bd8c\") " Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.927562 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-kube-api-access-sgmmh" (OuterVolumeSpecName: "kube-api-access-sgmmh") pod "34e53902-e2e3-4757-b7ad-b9ff5431bd8c" (UID: "34e53902-e2e3-4757-b7ad-b9ff5431bd8c"). InnerVolumeSpecName "kube-api-access-sgmmh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.934612 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-scripts" (OuterVolumeSpecName: "scripts") pod "34e53902-e2e3-4757-b7ad-b9ff5431bd8c" (UID: "34e53902-e2e3-4757-b7ad-b9ff5431bd8c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.962767 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34e53902-e2e3-4757-b7ad-b9ff5431bd8c" (UID: "34e53902-e2e3-4757-b7ad-b9ff5431bd8c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:25 crc kubenswrapper[4916]: I1203 19:51:25.971888 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-config-data" (OuterVolumeSpecName: "config-data") pod "34e53902-e2e3-4757-b7ad-b9ff5431bd8c" (UID: "34e53902-e2e3-4757-b7ad-b9ff5431bd8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.021776 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.021810 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.021827 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgmmh\" (UniqueName: \"kubernetes.io/projected/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-kube-api-access-sgmmh\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.021839 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/34e53902-e2e3-4757-b7ad-b9ff5431bd8c-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.446144 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-9vjsv" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.448652 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-9vjsv" event={"ID":"34e53902-e2e3-4757-b7ad-b9ff5431bd8c","Type":"ContainerDied","Data":"624e08cbb2e3ff19e795f92f3908088e39516e44a38be48e3374dc6d207e9f0c"} Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.448714 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="624e08cbb2e3ff19e795f92f3908088e39516e44a38be48e3374dc6d207e9f0c" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.496489 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.646841 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.731057 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.824773 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.825017 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.837680 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-config-data\") pod \"46082ae5-9ed1-46c5-8320-d7477415de04\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.837813 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-scripts\") pod \"46082ae5-9ed1-46c5-8320-d7477415de04\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.837870 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwgwb\" (UniqueName: \"kubernetes.io/projected/46082ae5-9ed1-46c5-8320-d7477415de04-kube-api-access-bwgwb\") pod \"46082ae5-9ed1-46c5-8320-d7477415de04\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.837927 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-combined-ca-bundle\") pod \"46082ae5-9ed1-46c5-8320-d7477415de04\" (UID: \"46082ae5-9ed1-46c5-8320-d7477415de04\") " Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.855003 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-scripts" (OuterVolumeSpecName: "scripts") pod "46082ae5-9ed1-46c5-8320-d7477415de04" (UID: 
"46082ae5-9ed1-46c5-8320-d7477415de04"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.863049 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46082ae5-9ed1-46c5-8320-d7477415de04-kube-api-access-bwgwb" (OuterVolumeSpecName: "kube-api-access-bwgwb") pod "46082ae5-9ed1-46c5-8320-d7477415de04" (UID: "46082ae5-9ed1-46c5-8320-d7477415de04"). InnerVolumeSpecName "kube-api-access-bwgwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.875344 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "46082ae5-9ed1-46c5-8320-d7477415de04" (UID: "46082ae5-9ed1-46c5-8320-d7477415de04"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.909171 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-config-data" (OuterVolumeSpecName: "config-data") pod "46082ae5-9ed1-46c5-8320-d7477415de04" (UID: "46082ae5-9ed1-46c5-8320-d7477415de04"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.944665 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.944706 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.944720 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwgwb\" (UniqueName: \"kubernetes.io/projected/46082ae5-9ed1-46c5-8320-d7477415de04-kube-api-access-bwgwb\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.944733 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46082ae5-9ed1-46c5-8320-d7477415de04-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:26 crc kubenswrapper[4916]: I1203 19:51:26.968170 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.454686 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-vrzk5" event={"ID":"46082ae5-9ed1-46c5-8320-d7477415de04","Type":"ContainerDied","Data":"0e2fe15fccac3e543867ee5d2118158f5fb5938b5abb7af3a0b39d81e25ea79f"} Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.454742 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e2fe15fccac3e543867ee5d2118158f5fb5938b5abb7af3a0b39d81e25ea79f" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.454791 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-vrzk5" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.454843 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerName="nova-api-log" containerID="cri-o://fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf" gracePeriod=30 Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.454903 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerName="nova-api-api" containerID="cri-o://bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa" gracePeriod=30 Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.552538 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 19:51:27 crc kubenswrapper[4916]: E1203 19:51:27.552900 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46082ae5-9ed1-46c5-8320-d7477415de04" containerName="nova-cell1-conductor-db-sync" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.552917 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="46082ae5-9ed1-46c5-8320-d7477415de04" containerName="nova-cell1-conductor-db-sync" Dec 03 19:51:27 crc kubenswrapper[4916]: E1203 19:51:27.552938 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34e53902-e2e3-4757-b7ad-b9ff5431bd8c" containerName="nova-manage" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.552944 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="34e53902-e2e3-4757-b7ad-b9ff5431bd8c" containerName="nova-manage" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.553103 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="34e53902-e2e3-4757-b7ad-b9ff5431bd8c" containerName="nova-manage" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.553117 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="46082ae5-9ed1-46c5-8320-d7477415de04" containerName="nova-cell1-conductor-db-sync" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.553710 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.555480 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.568374 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.660211 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22c68a9c-f222-4118-b636-311954e0d502-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"22c68a9c-f222-4118-b636-311954e0d502\") " pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.660296 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22c68a9c-f222-4118-b636-311954e0d502-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"22c68a9c-f222-4118-b636-311954e0d502\") " pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.660526 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2gx5\" (UniqueName: \"kubernetes.io/projected/22c68a9c-f222-4118-b636-311954e0d502-kube-api-access-b2gx5\") pod \"nova-cell1-conductor-0\" (UID: \"22c68a9c-f222-4118-b636-311954e0d502\") " pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.761782 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22c68a9c-f222-4118-b636-311954e0d502-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"22c68a9c-f222-4118-b636-311954e0d502\") " pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.761873 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2gx5\" (UniqueName: \"kubernetes.io/projected/22c68a9c-f222-4118-b636-311954e0d502-kube-api-access-b2gx5\") pod \"nova-cell1-conductor-0\" (UID: \"22c68a9c-f222-4118-b636-311954e0d502\") " pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.761969 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22c68a9c-f222-4118-b636-311954e0d502-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"22c68a9c-f222-4118-b636-311954e0d502\") " pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.766279 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22c68a9c-f222-4118-b636-311954e0d502-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"22c68a9c-f222-4118-b636-311954e0d502\") " pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.766783 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22c68a9c-f222-4118-b636-311954e0d502-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"22c68a9c-f222-4118-b636-311954e0d502\") " pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.776626 4916 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2gx5\" (UniqueName: \"kubernetes.io/projected/22c68a9c-f222-4118-b636-311954e0d502-kube-api-access-b2gx5\") pod \"nova-cell1-conductor-0\" (UID: \"22c68a9c-f222-4118-b636-311954e0d502\") " pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:27 crc kubenswrapper[4916]: E1203 19:51:27.839742 4916 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/a6b29405eb3865227e036dd8b1da8655a1de2710e20b917f576aa57137bb2c08/diff" to get inode usage: stat /var/lib/containers/storage/overlay/a6b29405eb3865227e036dd8b1da8655a1de2710e20b917f576aa57137bb2c08/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_heat-engine-6dd8857784-86hhw_3743884c-79ea-47d6-ad97-92d235fd5a98/heat-engine/0.log" to get inode usage: stat /var/log/pods/openstack_heat-engine-6dd8857784-86hhw_3743884c-79ea-47d6-ad97-92d235fd5a98/heat-engine/0.log: no such file or directory Dec 03 19:51:27 crc kubenswrapper[4916]: I1203 19:51:27.927513 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:28 crc kubenswrapper[4916]: I1203 19:51:28.463678 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 19:51:28 crc kubenswrapper[4916]: I1203 19:51:28.465033 4916 generic.go:334] "Generic (PLEG): container finished" podID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerID="fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf" exitCode=143 Dec 03 19:51:28 crc kubenswrapper[4916]: I1203 19:51:28.465071 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ded59053-6b2a-4d20-bd49-1a444e35ad2f","Type":"ContainerDied","Data":"fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf"} Dec 03 19:51:28 crc kubenswrapper[4916]: I1203 19:51:28.465220 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="fd9f1aab-89f3-43d5-a18e-54220c1f05d8" containerName="nova-scheduler-scheduler" containerID="cri-o://d7e3ef3c5f0f6e7b2a1952b19d8c513b5b71a227840951818c797e135dbf829a" gracePeriod=30 Dec 03 19:51:29 crc kubenswrapper[4916]: I1203 19:51:29.479814 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"22c68a9c-f222-4118-b636-311954e0d502","Type":"ContainerStarted","Data":"35a3140bc6f7ae23e545dc76ef3513e37221ce89925cb51ad11871c8ff736dbb"} Dec 03 19:51:29 crc kubenswrapper[4916]: I1203 19:51:29.480731 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"22c68a9c-f222-4118-b636-311954e0d502","Type":"ContainerStarted","Data":"ad2e695a3ad2684a80bcb99f798e70fd88fbfb98c6fc70646a1e19734de55058"} Dec 03 19:51:29 crc kubenswrapper[4916]: I1203 19:51:29.480806 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:29 crc kubenswrapper[4916]: I1203 19:51:29.505737 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.505716815 podStartE2EDuration="2.505716815s" podCreationTimestamp="2025-12-03 19:51:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:51:29.494662149 +0000 UTC m=+1305.457472425" 
watchObservedRunningTime="2025-12-03 19:51:29.505716815 +0000 UTC m=+1305.468527091" Dec 03 19:51:30 crc kubenswrapper[4916]: E1203 19:51:30.653342 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7e3ef3c5f0f6e7b2a1952b19d8c513b5b71a227840951818c797e135dbf829a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 19:51:30 crc kubenswrapper[4916]: E1203 19:51:30.656408 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7e3ef3c5f0f6e7b2a1952b19d8c513b5b71a227840951818c797e135dbf829a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 19:51:30 crc kubenswrapper[4916]: E1203 19:51:30.658855 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d7e3ef3c5f0f6e7b2a1952b19d8c513b5b71a227840951818c797e135dbf829a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 19:51:30 crc kubenswrapper[4916]: E1203 19:51:30.658910 4916 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="fd9f1aab-89f3-43d5-a18e-54220c1f05d8" containerName="nova-scheduler-scheduler" Dec 03 19:51:30 crc kubenswrapper[4916]: I1203 19:51:30.770521 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:51:30 crc kubenswrapper[4916]: I1203 19:51:30.862991 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7756b9d78c-6kh76"] Dec 03 19:51:30 crc kubenswrapper[4916]: I1203 19:51:30.864014 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" podUID="337f63c9-1130-480a-9fa4-8b869540333d" containerName="dnsmasq-dns" containerID="cri-o://16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113" gracePeriod=10 Dec 03 19:51:31 crc kubenswrapper[4916]: E1203 19:51:31.327181 4916 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/b07ad44eebc2b0700d5abac65259201f7fff1eea5c61dc5b57237af2bc39c81a/diff" to get inode usage: stat /var/lib/containers/storage/overlay/b07ad44eebc2b0700d5abac65259201f7fff1eea5c61dc5b57237af2bc39c81a/diff: no such file or directory, extraDiskErr: Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.379287 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.428631 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-swift-storage-0\") pod \"337f63c9-1130-480a-9fa4-8b869540333d\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.428725 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-nb\") pod \"337f63c9-1130-480a-9fa4-8b869540333d\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.428793 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsbqw\" (UniqueName: \"kubernetes.io/projected/337f63c9-1130-480a-9fa4-8b869540333d-kube-api-access-hsbqw\") pod \"337f63c9-1130-480a-9fa4-8b869540333d\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.428933 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-svc\") pod \"337f63c9-1130-480a-9fa4-8b869540333d\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.429012 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-sb\") pod \"337f63c9-1130-480a-9fa4-8b869540333d\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.429185 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-config\") pod \"337f63c9-1130-480a-9fa4-8b869540333d\" (UID: \"337f63c9-1130-480a-9fa4-8b869540333d\") " Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.442259 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/337f63c9-1130-480a-9fa4-8b869540333d-kube-api-access-hsbqw" (OuterVolumeSpecName: "kube-api-access-hsbqw") pod "337f63c9-1130-480a-9fa4-8b869540333d" (UID: "337f63c9-1130-480a-9fa4-8b869540333d"). InnerVolumeSpecName "kube-api-access-hsbqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.499418 4916 generic.go:334] "Generic (PLEG): container finished" podID="337f63c9-1130-480a-9fa4-8b869540333d" containerID="16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113" exitCode=0 Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.499458 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.499513 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" event={"ID":"337f63c9-1130-480a-9fa4-8b869540333d","Type":"ContainerDied","Data":"16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113"} Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.499537 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7756b9d78c-6kh76" event={"ID":"337f63c9-1130-480a-9fa4-8b869540333d","Type":"ContainerDied","Data":"c5d8e2be6eaf9219995da1e48f15f2850c3105cfd8848bd5b67f16223dd97c0c"} Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.499553 4916 scope.go:117] "RemoveContainer" containerID="16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.523091 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-config" (OuterVolumeSpecName: "config") pod "337f63c9-1130-480a-9fa4-8b869540333d" (UID: "337f63c9-1130-480a-9fa4-8b869540333d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.524216 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "337f63c9-1130-480a-9fa4-8b869540333d" (UID: "337f63c9-1130-480a-9fa4-8b869540333d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.530821 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "337f63c9-1130-480a-9fa4-8b869540333d" (UID: "337f63c9-1130-480a-9fa4-8b869540333d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.534173 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.534214 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.534226 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.534234 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsbqw\" (UniqueName: \"kubernetes.io/projected/337f63c9-1130-480a-9fa4-8b869540333d-kube-api-access-hsbqw\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.538133 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "337f63c9-1130-480a-9fa4-8b869540333d" (UID: "337f63c9-1130-480a-9fa4-8b869540333d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.548731 4916 scope.go:117] "RemoveContainer" containerID="7469afa4f06848d0d47aa3e5a358a3c0edb2354d0ce188905924d99687989121" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.558656 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "337f63c9-1130-480a-9fa4-8b869540333d" (UID: "337f63c9-1130-480a-9fa4-8b869540333d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.578327 4916 scope.go:117] "RemoveContainer" containerID="16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113" Dec 03 19:51:31 crc kubenswrapper[4916]: E1203 19:51:31.578822 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113\": container with ID starting with 16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113 not found: ID does not exist" containerID="16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.578863 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113"} err="failed to get container status \"16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113\": rpc error: code = NotFound desc = could not find container \"16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113\": container with ID starting with 16c26fa646f10762bde3d409649d51be1fb4953c17ae48085996b65ab98f0113 not found: ID does not exist" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.578890 4916 scope.go:117] "RemoveContainer" containerID="7469afa4f06848d0d47aa3e5a358a3c0edb2354d0ce188905924d99687989121" Dec 03 19:51:31 crc kubenswrapper[4916]: E1203 19:51:31.579303 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7469afa4f06848d0d47aa3e5a358a3c0edb2354d0ce188905924d99687989121\": container with ID starting with 7469afa4f06848d0d47aa3e5a358a3c0edb2354d0ce188905924d99687989121 not found: ID does not exist" containerID="7469afa4f06848d0d47aa3e5a358a3c0edb2354d0ce188905924d99687989121" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.579353 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7469afa4f06848d0d47aa3e5a358a3c0edb2354d0ce188905924d99687989121"} err="failed to get container status \"7469afa4f06848d0d47aa3e5a358a3c0edb2354d0ce188905924d99687989121\": rpc error: code = NotFound desc = could not find container \"7469afa4f06848d0d47aa3e5a358a3c0edb2354d0ce188905924d99687989121\": container with ID starting with 7469afa4f06848d0d47aa3e5a358a3c0edb2354d0ce188905924d99687989121 not found: ID does not exist" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.636908 4916 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.636935 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/337f63c9-1130-480a-9fa4-8b869540333d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.836634 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7756b9d78c-6kh76"] Dec 03 19:51:31 crc kubenswrapper[4916]: I1203 19:51:31.844244 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7756b9d78c-6kh76"] Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.298583 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.353072 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded59053-6b2a-4d20-bd49-1a444e35ad2f-logs\") pod \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.353203 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-combined-ca-bundle\") pod \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.353757 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ded59053-6b2a-4d20-bd49-1a444e35ad2f-logs" (OuterVolumeSpecName: "logs") pod "ded59053-6b2a-4d20-bd49-1a444e35ad2f" (UID: "ded59053-6b2a-4d20-bd49-1a444e35ad2f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.354175 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-config-data\") pod \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.354228 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9nnx\" (UniqueName: \"kubernetes.io/projected/ded59053-6b2a-4d20-bd49-1a444e35ad2f-kube-api-access-c9nnx\") pod \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\" (UID: \"ded59053-6b2a-4d20-bd49-1a444e35ad2f\") " Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.354643 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded59053-6b2a-4d20-bd49-1a444e35ad2f-logs\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.366759 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ded59053-6b2a-4d20-bd49-1a444e35ad2f-kube-api-access-c9nnx" (OuterVolumeSpecName: "kube-api-access-c9nnx") pod "ded59053-6b2a-4d20-bd49-1a444e35ad2f" (UID: "ded59053-6b2a-4d20-bd49-1a444e35ad2f"). InnerVolumeSpecName "kube-api-access-c9nnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.413761 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ded59053-6b2a-4d20-bd49-1a444e35ad2f" (UID: "ded59053-6b2a-4d20-bd49-1a444e35ad2f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.445746 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-config-data" (OuterVolumeSpecName: "config-data") pod "ded59053-6b2a-4d20-bd49-1a444e35ad2f" (UID: "ded59053-6b2a-4d20-bd49-1a444e35ad2f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.457651 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.457706 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9nnx\" (UniqueName: \"kubernetes.io/projected/ded59053-6b2a-4d20-bd49-1a444e35ad2f-kube-api-access-c9nnx\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.457717 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded59053-6b2a-4d20-bd49-1a444e35ad2f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.488685 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="337f63c9-1130-480a-9fa4-8b869540333d" path="/var/lib/kubelet/pods/337f63c9-1130-480a-9fa4-8b869540333d/volumes" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.509571 4916 generic.go:334] "Generic (PLEG): container finished" podID="fd9f1aab-89f3-43d5-a18e-54220c1f05d8" containerID="d7e3ef3c5f0f6e7b2a1952b19d8c513b5b71a227840951818c797e135dbf829a" exitCode=0 Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.509624 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fd9f1aab-89f3-43d5-a18e-54220c1f05d8","Type":"ContainerDied","Data":"d7e3ef3c5f0f6e7b2a1952b19d8c513b5b71a227840951818c797e135dbf829a"} Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.512347 4916 generic.go:334] "Generic (PLEG): container finished" podID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerID="bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa" exitCode=0 Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.512389 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ded59053-6b2a-4d20-bd49-1a444e35ad2f","Type":"ContainerDied","Data":"bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa"} Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.512415 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ded59053-6b2a-4d20-bd49-1a444e35ad2f","Type":"ContainerDied","Data":"9dd4be3aed5ae9e805c3fec19ab09494bb3c855bb63be7baf173ce5d923b357a"} Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.512433 4916 scope.go:117] "RemoveContainer" containerID="bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.512447 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.582586 4916 scope.go:117] "RemoveContainer" containerID="fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.594560 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.607340 4916 scope.go:117] "RemoveContainer" containerID="bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa" Dec 03 19:51:32 crc kubenswrapper[4916]: E1203 19:51:32.607819 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa\": container with ID starting with bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa not found: ID does not exist" containerID="bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.607854 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa"} err="failed to get container status \"bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa\": rpc error: code = NotFound desc = could not find container \"bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa\": container with ID starting with bcc22f0121ddf07a5ec931c7ac6b62341d3c5688aa5bc6a1ff3a440a8b6badaa not found: ID does not exist" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.607878 4916 scope.go:117] "RemoveContainer" containerID="fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf" Dec 03 19:51:32 crc kubenswrapper[4916]: E1203 19:51:32.608205 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf\": container with ID starting with fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf not found: ID does not exist" containerID="fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.608232 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf"} err="failed to get container status \"fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf\": rpc error: code = NotFound desc = could not find container \"fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf\": container with ID starting with fe7644b41a20a476e316314102a3db23409eaf78b8e9831286153e11b49e8cbf not found: ID does not exist" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.610328 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.620738 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 19:51:32 crc kubenswrapper[4916]: E1203 19:51:32.621230 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerName="nova-api-log" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.621249 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerName="nova-api-log" Dec 03 19:51:32 crc 
kubenswrapper[4916]: E1203 19:51:32.621272 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerName="nova-api-api" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.621281 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerName="nova-api-api" Dec 03 19:51:32 crc kubenswrapper[4916]: E1203 19:51:32.621480 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="337f63c9-1130-480a-9fa4-8b869540333d" containerName="init" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.621488 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="337f63c9-1130-480a-9fa4-8b869540333d" containerName="init" Dec 03 19:51:32 crc kubenswrapper[4916]: E1203 19:51:32.621497 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="337f63c9-1130-480a-9fa4-8b869540333d" containerName="dnsmasq-dns" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.621505 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="337f63c9-1130-480a-9fa4-8b869540333d" containerName="dnsmasq-dns" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.621734 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerName="nova-api-api" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.621764 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" containerName="nova-api-log" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.621784 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="337f63c9-1130-480a-9fa4-8b869540333d" containerName="dnsmasq-dns" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.623000 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.624965 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.630220 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.661745 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.661830 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-config-data\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.661959 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-logs\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.662008 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6rwc\" (UniqueName: \"kubernetes.io/projected/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-kube-api-access-k6rwc\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.764217 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.764294 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-config-data\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.764444 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-logs\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.764508 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6rwc\" (UniqueName: \"kubernetes.io/projected/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-kube-api-access-k6rwc\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.766464 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-logs\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " 
pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.769727 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.769815 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-config-data\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.792196 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6rwc\" (UniqueName: \"kubernetes.io/projected/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-kube-api-access-k6rwc\") pod \"nova-api-0\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " pod="openstack/nova-api-0" Dec 03 19:51:32 crc kubenswrapper[4916]: I1203 19:51:32.939382 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:51:33 crc kubenswrapper[4916]: W1203 19:51:33.244675 4916 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34e53902_e2e3_4757_b7ad_b9ff5431bd8c.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod34e53902_e2e3_4757_b7ad_b9ff5431bd8c.slice: no such file or directory Dec 03 19:51:33 crc kubenswrapper[4916]: W1203 19:51:33.247949 4916 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podded59053_6b2a_4d20_bd49_1a444e35ad2f.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podded59053_6b2a_4d20_bd49_1a444e35ad2f.slice: no such file or directory Dec 03 19:51:33 crc kubenswrapper[4916]: W1203 19:51:33.263473 4916 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46082ae5_9ed1_46c5_8320_d7477415de04.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46082ae5_9ed1_46c5_8320_d7477415de04.slice: no such file or directory Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.397365 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.477709 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-combined-ca-bundle\") pod \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\" (UID: \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.477756 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zl7n7\" (UniqueName: \"kubernetes.io/projected/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-kube-api-access-zl7n7\") pod \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\" (UID: \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.477840 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-config-data\") pod \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\" (UID: \"fd9f1aab-89f3-43d5-a18e-54220c1f05d8\") " Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.478889 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.483328 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-kube-api-access-zl7n7" (OuterVolumeSpecName: "kube-api-access-zl7n7") pod "fd9f1aab-89f3-43d5-a18e-54220c1f05d8" (UID: "fd9f1aab-89f3-43d5-a18e-54220c1f05d8"). InnerVolumeSpecName "kube-api-access-zl7n7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.499823 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.507423 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-config-data" (OuterVolumeSpecName: "config-data") pod "fd9f1aab-89f3-43d5-a18e-54220c1f05d8" (UID: "fd9f1aab-89f3-43d5-a18e-54220c1f05d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.510971 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd9f1aab-89f3-43d5-a18e-54220c1f05d8" (UID: "fd9f1aab-89f3-43d5-a18e-54220c1f05d8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.562123 4916 generic.go:334] "Generic (PLEG): container finished" podID="38cae7c4-cc3b-41b0-9552-e85743db98ab" containerID="939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220" exitCode=137 Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.562202 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-86f5cb85df-p52sw" event={"ID":"38cae7c4-cc3b-41b0-9552-e85743db98ab","Type":"ContainerDied","Data":"939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220"} Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.562227 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-86f5cb85df-p52sw" event={"ID":"38cae7c4-cc3b-41b0-9552-e85743db98ab","Type":"ContainerDied","Data":"4ad08ed0ee9162fe4fa441de7c96c22d35eb276dc9564c29d77b645774943bef"} Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.562242 4916 scope.go:117] "RemoveContainer" containerID="939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.562334 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-cfnapi-86f5cb85df-p52sw" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.568116 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.568276 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"fd9f1aab-89f3-43d5-a18e-54220c1f05d8","Type":"ContainerDied","Data":"82bcdb77f8c90722c4a1439e2f4dc04d8825ae7fd5ace5a80a0111470ff7441e"} Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.572100 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4","Type":"ContainerStarted","Data":"153e0c2ad8bfc57166b50c1063fe328c8f2056e5fbbc50da95ebb7298f841d3e"} Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.581508 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data\") pod \"38cae7c4-cc3b-41b0-9552-e85743db98ab\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.581594 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-combined-ca-bundle\") pod \"38cae7c4-cc3b-41b0-9552-e85743db98ab\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.581665 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kb869\" (UniqueName: \"kubernetes.io/projected/38cae7c4-cc3b-41b0-9552-e85743db98ab-kube-api-access-kb869\") pod \"38cae7c4-cc3b-41b0-9552-e85743db98ab\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.581914 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data-custom\") pod \"38cae7c4-cc3b-41b0-9552-e85743db98ab\" (UID: \"38cae7c4-cc3b-41b0-9552-e85743db98ab\") " Dec 03 19:51:33 crc kubenswrapper[4916]: 
I1203 19:51:33.582925 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.582980 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zl7n7\" (UniqueName: \"kubernetes.io/projected/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-kube-api-access-zl7n7\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.582992 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd9f1aab-89f3-43d5-a18e-54220c1f05d8-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.586648 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "38cae7c4-cc3b-41b0-9552-e85743db98ab" (UID: "38cae7c4-cc3b-41b0-9552-e85743db98ab"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.609904 4916 scope.go:117] "RemoveContainer" containerID="939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.611362 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38cae7c4-cc3b-41b0-9552-e85743db98ab-kube-api-access-kb869" (OuterVolumeSpecName: "kube-api-access-kb869") pod "38cae7c4-cc3b-41b0-9552-e85743db98ab" (UID: "38cae7c4-cc3b-41b0-9552-e85743db98ab"). InnerVolumeSpecName "kube-api-access-kb869". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:33 crc kubenswrapper[4916]: E1203 19:51:33.613665 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220\": container with ID starting with 939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220 not found: ID does not exist" containerID="939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.613707 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220"} err="failed to get container status \"939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220\": rpc error: code = NotFound desc = could not find container \"939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220\": container with ID starting with 939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220 not found: ID does not exist" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.613730 4916 scope.go:117] "RemoveContainer" containerID="d7e3ef3c5f0f6e7b2a1952b19d8c513b5b71a227840951818c797e135dbf829a" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.613824 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.637543 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.640600 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38cae7c4-cc3b-41b0-9552-e85743db98ab" (UID: "38cae7c4-cc3b-41b0-9552-e85743db98ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.650399 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:51:33 crc kubenswrapper[4916]: E1203 19:51:33.650919 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38cae7c4-cc3b-41b0-9552-e85743db98ab" containerName="heat-cfnapi" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.650937 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="38cae7c4-cc3b-41b0-9552-e85743db98ab" containerName="heat-cfnapi" Dec 03 19:51:33 crc kubenswrapper[4916]: E1203 19:51:33.650949 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd9f1aab-89f3-43d5-a18e-54220c1f05d8" containerName="nova-scheduler-scheduler" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.650955 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd9f1aab-89f3-43d5-a18e-54220c1f05d8" containerName="nova-scheduler-scheduler" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.651131 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="38cae7c4-cc3b-41b0-9552-e85743db98ab" containerName="heat-cfnapi" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.651155 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd9f1aab-89f3-43d5-a18e-54220c1f05d8" containerName="nova-scheduler-scheduler" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.651857 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.657017 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.675525 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data" (OuterVolumeSpecName: "config-data") pod "38cae7c4-cc3b-41b0-9552-e85743db98ab" (UID: "38cae7c4-cc3b-41b0-9552-e85743db98ab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.676187 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.685062 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-config-data\") pod \"nova-scheduler-0\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.685133 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k99r4\" (UniqueName: \"kubernetes.io/projected/1f124358-6ae2-4378-80db-d30606496eca-kube-api-access-k99r4\") pod \"nova-scheduler-0\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.685290 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.685358 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.685379 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.685393 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kb869\" (UniqueName: \"kubernetes.io/projected/38cae7c4-cc3b-41b0-9552-e85743db98ab-kube-api-access-kb869\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.685405 4916 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/38cae7c4-cc3b-41b0-9552-e85743db98ab-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.787363 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.787587 4916 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-config-data\") pod \"nova-scheduler-0\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.787669 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k99r4\" (UniqueName: \"kubernetes.io/projected/1f124358-6ae2-4378-80db-d30606496eca-kube-api-access-k99r4\") pod \"nova-scheduler-0\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.793113 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.803971 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-config-data\") pod \"nova-scheduler-0\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.804952 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k99r4\" (UniqueName: \"kubernetes.io/projected/1f124358-6ae2-4378-80db-d30606496eca-kube-api-access-k99r4\") pod \"nova-scheduler-0\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " pod="openstack/nova-scheduler-0" Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.908498 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-cfnapi-86f5cb85df-p52sw"] Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.917232 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-cfnapi-86f5cb85df-p52sw"] Dec 03 19:51:33 crc kubenswrapper[4916]: I1203 19:51:33.971256 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 19:51:34 crc kubenswrapper[4916]: I1203 19:51:34.456283 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:51:34 crc kubenswrapper[4916]: I1203 19:51:34.516873 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38cae7c4-cc3b-41b0-9552-e85743db98ab" path="/var/lib/kubelet/pods/38cae7c4-cc3b-41b0-9552-e85743db98ab/volumes" Dec 03 19:51:34 crc kubenswrapper[4916]: I1203 19:51:34.517418 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ded59053-6b2a-4d20-bd49-1a444e35ad2f" path="/var/lib/kubelet/pods/ded59053-6b2a-4d20-bd49-1a444e35ad2f/volumes" Dec 03 19:51:34 crc kubenswrapper[4916]: I1203 19:51:34.518020 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd9f1aab-89f3-43d5-a18e-54220c1f05d8" path="/var/lib/kubelet/pods/fd9f1aab-89f3-43d5-a18e-54220c1f05d8/volumes" Dec 03 19:51:34 crc kubenswrapper[4916]: I1203 19:51:34.590541 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4","Type":"ContainerStarted","Data":"4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac"} Dec 03 19:51:34 crc kubenswrapper[4916]: I1203 19:51:34.590610 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4","Type":"ContainerStarted","Data":"2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f"} Dec 03 19:51:34 crc kubenswrapper[4916]: I1203 19:51:34.594523 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1f124358-6ae2-4378-80db-d30606496eca","Type":"ContainerStarted","Data":"f0217d84ac18ac7cfc7ba81373791eb8e4a086a445c92ecbbc82cc34836a996a"} Dec 03 19:51:34 crc kubenswrapper[4916]: I1203 19:51:34.611760 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.61174675 podStartE2EDuration="2.61174675s" podCreationTimestamp="2025-12-03 19:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:51:34.608143394 +0000 UTC m=+1310.570953660" watchObservedRunningTime="2025-12-03 19:51:34.61174675 +0000 UTC m=+1310.574557016" Dec 03 19:51:35 crc kubenswrapper[4916]: I1203 19:51:35.616017 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1f124358-6ae2-4378-80db-d30606496eca","Type":"ContainerStarted","Data":"12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559"} Dec 03 19:51:35 crc kubenswrapper[4916]: I1203 19:51:35.656690 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.656664726 podStartE2EDuration="2.656664726s" podCreationTimestamp="2025-12-03 19:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:51:35.640256117 +0000 UTC m=+1311.603066423" watchObservedRunningTime="2025-12-03 19:51:35.656664726 +0000 UTC m=+1311.619475022" Dec 03 19:51:37 crc kubenswrapper[4916]: I1203 19:51:37.979757 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 03 19:51:38 crc kubenswrapper[4916]: I1203 19:51:38.971646 4916 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 19:51:39 crc kubenswrapper[4916]: I1203 19:51:39.429887 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 19:51:42 crc kubenswrapper[4916]: I1203 19:51:42.939807 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 19:51:42 crc kubenswrapper[4916]: I1203 19:51:42.940549 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 19:51:43 crc kubenswrapper[4916]: I1203 19:51:43.971880 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 19:51:44 crc kubenswrapper[4916]: I1203 19:51:44.020127 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 19:51:44 crc kubenswrapper[4916]: I1203 19:51:44.021752 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 19:51:44 crc kubenswrapper[4916]: I1203 19:51:44.022099 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 19:51:44 crc kubenswrapper[4916]: I1203 19:51:44.720629 4916 generic.go:334] "Generic (PLEG): container finished" podID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerID="34d6bb7837af73a6c05714d08eb9f5ff8b6d104d1f28079359de0fd5f039de62" exitCode=137 Dec 03 19:51:44 crc kubenswrapper[4916]: I1203 19:51:44.721028 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1c749c4-0fe3-43e0-b6b7-2665fb34487d","Type":"ContainerDied","Data":"34d6bb7837af73a6c05714d08eb9f5ff8b6d104d1f28079359de0fd5f039de62"} Dec 03 19:51:44 crc kubenswrapper[4916]: I1203 19:51:44.780081 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.043541 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.127371 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-config-data\") pod \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.127446 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcghj\" (UniqueName: \"kubernetes.io/projected/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-kube-api-access-bcghj\") pod \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.127554 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-combined-ca-bundle\") pod \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.127776 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-run-httpd\") pod \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.127855 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-sg-core-conf-yaml\") pod \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.127885 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-scripts\") pod \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.127940 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-log-httpd\") pod \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\" (UID: \"a1c749c4-0fe3-43e0-b6b7-2665fb34487d\") " Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.129188 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a1c749c4-0fe3-43e0-b6b7-2665fb34487d" (UID: "a1c749c4-0fe3-43e0-b6b7-2665fb34487d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.129625 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a1c749c4-0fe3-43e0-b6b7-2665fb34487d" (UID: "a1c749c4-0fe3-43e0-b6b7-2665fb34487d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.131735 4916 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.131764 4916 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.134811 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-scripts" (OuterVolumeSpecName: "scripts") pod "a1c749c4-0fe3-43e0-b6b7-2665fb34487d" (UID: "a1c749c4-0fe3-43e0-b6b7-2665fb34487d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.136753 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-kube-api-access-bcghj" (OuterVolumeSpecName: "kube-api-access-bcghj") pod "a1c749c4-0fe3-43e0-b6b7-2665fb34487d" (UID: "a1c749c4-0fe3-43e0-b6b7-2665fb34487d"). InnerVolumeSpecName "kube-api-access-bcghj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.196344 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a1c749c4-0fe3-43e0-b6b7-2665fb34487d" (UID: "a1c749c4-0fe3-43e0-b6b7-2665fb34487d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.235967 4916 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.236693 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.236953 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcghj\" (UniqueName: \"kubernetes.io/projected/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-kube-api-access-bcghj\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.258388 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-config-data" (OuterVolumeSpecName: "config-data") pod "a1c749c4-0fe3-43e0-b6b7-2665fb34487d" (UID: "a1c749c4-0fe3-43e0-b6b7-2665fb34487d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.258914 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1c749c4-0fe3-43e0-b6b7-2665fb34487d" (UID: "a1c749c4-0fe3-43e0-b6b7-2665fb34487d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.338989 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.339017 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1c749c4-0fe3-43e0-b6b7-2665fb34487d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.735923 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a1c749c4-0fe3-43e0-b6b7-2665fb34487d","Type":"ContainerDied","Data":"de39ee0a56c0352514bcce2d61b77014ceb7317679d561d9c893c8a668449d41"} Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.735986 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.736017 4916 scope.go:117] "RemoveContainer" containerID="34d6bb7837af73a6c05714d08eb9f5ff8b6d104d1f28079359de0fd5f039de62" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.773289 4916 scope.go:117] "RemoveContainer" containerID="e2e05f7189350bea03548e8f77d41d26ab844167ea353142861825f749b8dae1" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.795097 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.799653 4916 scope.go:117] "RemoveContainer" containerID="31fbaf62b8c02196c57a896d5380ed9b70c60b372eb9d588cf2e51d98523cae2" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.805269 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.832355 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:45 crc kubenswrapper[4916]: E1203 19:51:45.832861 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="sg-core" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.832878 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="sg-core" Dec 03 19:51:45 crc kubenswrapper[4916]: E1203 19:51:45.832903 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="proxy-httpd" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.832911 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="proxy-httpd" Dec 03 19:51:45 crc kubenswrapper[4916]: E1203 19:51:45.832942 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="ceilometer-notification-agent" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.832951 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="ceilometer-notification-agent" Dec 03 19:51:45 crc kubenswrapper[4916]: E1203 19:51:45.832965 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="ceilometer-central-agent" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.832973 4916 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="ceilometer-central-agent" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.833101 4916 scope.go:117] "RemoveContainer" containerID="7890dd1aa45019999238a08469d847a658b62dcc2791035d3c23e7b940b7d454" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.833196 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="ceilometer-central-agent" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.833222 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="sg-core" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.833241 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="ceilometer-notification-agent" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.833256 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" containerName="proxy-httpd" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.836841 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.841151 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.841479 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.873839 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.952639 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-config-data\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.952762 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbsc9\" (UniqueName: \"kubernetes.io/projected/76d34189-453c-4b25-9331-97b406be5b6f-kube-api-access-fbsc9\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.952805 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-log-httpd\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.952846 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.952867 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-run-httpd\") pod \"ceilometer-0\" (UID: 
\"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.952886 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:45 crc kubenswrapper[4916]: I1203 19:51:45.952918 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-scripts\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.058986 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.059632 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-run-httpd\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.059687 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.059766 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-scripts\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.059916 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-config-data\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.060015 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbsc9\" (UniqueName: \"kubernetes.io/projected/76d34189-453c-4b25-9331-97b406be5b6f-kube-api-access-fbsc9\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.060098 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-log-httpd\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.060161 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-run-httpd\") pod \"ceilometer-0\" (UID: 
\"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.063195 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-log-httpd\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.065998 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-scripts\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.067992 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-config-data\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.068773 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.077066 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.098298 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbsc9\" (UniqueName: \"kubernetes.io/projected/76d34189-453c-4b25-9331-97b406be5b6f-kube-api-access-fbsc9\") pod \"ceilometer-0\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.160126 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.162173 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.163447 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.165883 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f766f08ec381c0d446f946242779f93ec8affbc91dd83bc4db900247c021dcf7"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 
19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.166781 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://f766f08ec381c0d446f946242779f93ec8affbc91dd83bc4db900247c021dcf7" gracePeriod=600 Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.179716 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.488097 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1c749c4-0fe3-43e0-b6b7-2665fb34487d" path="/var/lib/kubelet/pods/a1c749c4-0fe3-43e0-b6b7-2665fb34487d/volumes" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.708206 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.749798 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="f766f08ec381c0d446f946242779f93ec8affbc91dd83bc4db900247c021dcf7" exitCode=0 Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.749871 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"f766f08ec381c0d446f946242779f93ec8affbc91dd83bc4db900247c021dcf7"} Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.749904 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"} Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.749922 4916 scope.go:117] "RemoveContainer" containerID="ac2ebe3bbf276071a9bfb2a9d6c5b901691899bf5c59f5b451ee6d04eb0e197f" Dec 03 19:51:46 crc kubenswrapper[4916]: I1203 19:51:46.756193 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76d34189-453c-4b25-9331-97b406be5b6f","Type":"ContainerStarted","Data":"27737130cd1b5cec8c0ad2de5aad56be0fa7f6552cc431d7c42cff4eaf06e156"} Dec 03 19:51:47 crc kubenswrapper[4916]: I1203 19:51:47.766346 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76d34189-453c-4b25-9331-97b406be5b6f","Type":"ContainerStarted","Data":"af3886e679ea8c38486f9f4ef6fb5f5d41a612b305c8d06665dd453dc1afb881"} Dec 03 19:51:48 crc kubenswrapper[4916]: I1203 19:51:48.784898 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76d34189-453c-4b25-9331-97b406be5b6f","Type":"ContainerStarted","Data":"a4d8e30723a8ba08f02c84d971e22798ae185c4afa6fcf47d70e0c4fd7ef9adc"} Dec 03 19:51:49 crc kubenswrapper[4916]: I1203 19:51:49.816080 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76d34189-453c-4b25-9331-97b406be5b6f","Type":"ContainerStarted","Data":"ffe7e191d3a8fca7a985b6c5e340f0d86b212eed157976a45de44cc8b46722bf"} Dec 03 19:51:50 crc kubenswrapper[4916]: I1203 19:51:50.850263 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76d34189-453c-4b25-9331-97b406be5b6f","Type":"ContainerStarted","Data":"b4b41eb73ae277b0c1af88066043f8b93f41ce93945a46f8fe80be2f8d35339c"} 
Dec 03 19:51:50 crc kubenswrapper[4916]: I1203 19:51:50.851969 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 19:51:50 crc kubenswrapper[4916]: I1203 19:51:50.881187 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.768133084 podStartE2EDuration="5.881168889s" podCreationTimestamp="2025-12-03 19:51:45 +0000 UTC" firstStartedPulling="2025-12-03 19:51:46.703290476 +0000 UTC m=+1322.666100742" lastFinishedPulling="2025-12-03 19:51:49.816326271 +0000 UTC m=+1325.779136547" observedRunningTime="2025-12-03 19:51:50.876932386 +0000 UTC m=+1326.839742732" watchObservedRunningTime="2025-12-03 19:51:50.881168889 +0000 UTC m=+1326.843979165" Dec 03 19:51:51 crc kubenswrapper[4916]: W1203 19:51:51.420666 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd9f1aab_89f3_43d5_a18e_54220c1f05d8.slice/crio-82bcdb77f8c90722c4a1439e2f4dc04d8825ae7fd5ace5a80a0111470ff7441e WatchSource:0}: Error finding container 82bcdb77f8c90722c4a1439e2f4dc04d8825ae7fd5ace5a80a0111470ff7441e: Status 404 returned error can't find the container with id 82bcdb77f8c90722c4a1439e2f4dc04d8825ae7fd5ace5a80a0111470ff7441e Dec 03 19:51:51 crc kubenswrapper[4916]: E1203 19:51:51.721038 4916 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode34d9c63_d03a_453c_997e_1e47baa58589.slice/crio-conmon-d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59d15586_9ea1_4a06_b563_4acac206caeb.slice/crio-b06340a6e567e08435a2d1ac3c5c9bb85905d9247d2d117e31360a413c189093.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38cae7c4_cc3b_41b0_9552_e85743db98ab.slice/crio-conmon-939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38cae7c4_cc3b_41b0_9552_e85743db98ab.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode34d9c63_d03a_453c_997e_1e47baa58589.slice/crio-d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1c749c4_0fe3_43e0_b6b7_2665fb34487d.slice/crio-de39ee0a56c0352514bcce2d61b77014ceb7317679d561d9c893c8a668449d41\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cc773ef_1b60_461f_a7ac_2b8a23a1d04f.slice/crio-conmon-f766f08ec381c0d446f946242779f93ec8affbc91dd83bc4db900247c021dcf7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cc773ef_1b60_461f_a7ac_2b8a23a1d04f.slice/crio-f766f08ec381c0d446f946242779f93ec8affbc91dd83bc4db900247c021dcf7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1c749c4_0fe3_43e0_b6b7_2665fb34487d.slice/crio-34d6bb7837af73a6c05714d08eb9f5ff8b6d104d1f28079359de0fd5f039de62.scope\": RecentStats: 
unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59d15586_9ea1_4a06_b563_4acac206caeb.slice/crio-conmon-b06340a6e567e08435a2d1ac3c5c9bb85905d9247d2d117e31360a413c189093.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1c749c4_0fe3_43e0_b6b7_2665fb34487d.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd9f1aab_89f3_43d5_a18e_54220c1f05d8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38cae7c4_cc3b_41b0_9552_e85743db98ab.slice/crio-4ad08ed0ee9162fe4fa441de7c96c22d35eb276dc9564c29d77b645774943bef\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1c749c4_0fe3_43e0_b6b7_2665fb34487d.slice/crio-conmon-34d6bb7837af73a6c05714d08eb9f5ff8b6d104d1f28079359de0fd5f039de62.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38cae7c4_cc3b_41b0_9552_e85743db98ab.slice/crio-939c749de4f830e0e02321554b7413eaf1c444d67d50e7e0bd853d36b577c220.scope\": RecentStats: unable to find data in memory cache]" Dec 03 19:51:51 crc kubenswrapper[4916]: I1203 19:51:51.886513 4916 generic.go:334] "Generic (PLEG): container finished" podID="59d15586-9ea1-4a06-b563-4acac206caeb" containerID="b06340a6e567e08435a2d1ac3c5c9bb85905d9247d2d117e31360a413c189093" exitCode=137 Dec 03 19:51:51 crc kubenswrapper[4916]: I1203 19:51:51.886649 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"59d15586-9ea1-4a06-b563-4acac206caeb","Type":"ContainerDied","Data":"b06340a6e567e08435a2d1ac3c5c9bb85905d9247d2d117e31360a413c189093"} Dec 03 19:51:51 crc kubenswrapper[4916]: I1203 19:51:51.889153 4916 generic.go:334] "Generic (PLEG): container finished" podID="e34d9c63-d03a-453c-997e-1e47baa58589" containerID="d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0" exitCode=137 Dec 03 19:51:51 crc kubenswrapper[4916]: I1203 19:51:51.889391 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e34d9c63-d03a-453c-997e-1e47baa58589","Type":"ContainerDied","Data":"d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0"} Dec 03 19:51:51 crc kubenswrapper[4916]: I1203 19:51:51.889521 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e34d9c63-d03a-453c-997e-1e47baa58589","Type":"ContainerDied","Data":"0bc68f795e7e3dd0d319840f4a90b9e293cdcafb08da1c012a5de40f4d4aaaad"} Dec 03 19:51:51 crc kubenswrapper[4916]: I1203 19:51:51.889659 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0bc68f795e7e3dd0d319840f4a90b9e293cdcafb08da1c012a5de40f4d4aaaad" Dec 03 19:51:51 crc kubenswrapper[4916]: I1203 19:51:51.923749 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:51 crc kubenswrapper[4916]: I1203 19:51:51.925319 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.088308 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e34d9c63-d03a-453c-997e-1e47baa58589-logs\") pod \"e34d9c63-d03a-453c-997e-1e47baa58589\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.088861 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e34d9c63-d03a-453c-997e-1e47baa58589-logs" (OuterVolumeSpecName: "logs") pod "e34d9c63-d03a-453c-997e-1e47baa58589" (UID: "e34d9c63-d03a-453c-997e-1e47baa58589"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.088871 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-config-data\") pod \"59d15586-9ea1-4a06-b563-4acac206caeb\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.088960 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmfkv\" (UniqueName: \"kubernetes.io/projected/59d15586-9ea1-4a06-b563-4acac206caeb-kube-api-access-nmfkv\") pod \"59d15586-9ea1-4a06-b563-4acac206caeb\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.089062 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xzhc\" (UniqueName: \"kubernetes.io/projected/e34d9c63-d03a-453c-997e-1e47baa58589-kube-api-access-9xzhc\") pod \"e34d9c63-d03a-453c-997e-1e47baa58589\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.089099 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-combined-ca-bundle\") pod \"e34d9c63-d03a-453c-997e-1e47baa58589\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.089139 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-combined-ca-bundle\") pod \"59d15586-9ea1-4a06-b563-4acac206caeb\" (UID: \"59d15586-9ea1-4a06-b563-4acac206caeb\") " Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.089868 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-config-data\") pod \"e34d9c63-d03a-453c-997e-1e47baa58589\" (UID: \"e34d9c63-d03a-453c-997e-1e47baa58589\") " Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.090672 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e34d9c63-d03a-453c-997e-1e47baa58589-logs\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.094513 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e34d9c63-d03a-453c-997e-1e47baa58589-kube-api-access-9xzhc" (OuterVolumeSpecName: "kube-api-access-9xzhc") pod "e34d9c63-d03a-453c-997e-1e47baa58589" (UID: 
"e34d9c63-d03a-453c-997e-1e47baa58589"). InnerVolumeSpecName "kube-api-access-9xzhc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.094751 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59d15586-9ea1-4a06-b563-4acac206caeb-kube-api-access-nmfkv" (OuterVolumeSpecName: "kube-api-access-nmfkv") pod "59d15586-9ea1-4a06-b563-4acac206caeb" (UID: "59d15586-9ea1-4a06-b563-4acac206caeb"). InnerVolumeSpecName "kube-api-access-nmfkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.115181 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-config-data" (OuterVolumeSpecName: "config-data") pod "e34d9c63-d03a-453c-997e-1e47baa58589" (UID: "e34d9c63-d03a-453c-997e-1e47baa58589"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.115889 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e34d9c63-d03a-453c-997e-1e47baa58589" (UID: "e34d9c63-d03a-453c-997e-1e47baa58589"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.116098 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-config-data" (OuterVolumeSpecName: "config-data") pod "59d15586-9ea1-4a06-b563-4acac206caeb" (UID: "59d15586-9ea1-4a06-b563-4acac206caeb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.134021 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "59d15586-9ea1-4a06-b563-4acac206caeb" (UID: "59d15586-9ea1-4a06-b563-4acac206caeb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.192953 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.193027 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmfkv\" (UniqueName: \"kubernetes.io/projected/59d15586-9ea1-4a06-b563-4acac206caeb-kube-api-access-nmfkv\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.193061 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xzhc\" (UniqueName: \"kubernetes.io/projected/e34d9c63-d03a-453c-997e-1e47baa58589-kube-api-access-9xzhc\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.193087 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.193108 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59d15586-9ea1-4a06-b563-4acac206caeb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.193128 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e34d9c63-d03a-453c-997e-1e47baa58589-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.901456 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"59d15586-9ea1-4a06-b563-4acac206caeb","Type":"ContainerDied","Data":"8575de5f3b4b5e87942106fd14e16205f2375a16d58d94d8dae78d92792ef8b4"} Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.901517 4916 scope.go:117] "RemoveContainer" containerID="b06340a6e567e08435a2d1ac3c5c9bb85905d9247d2d117e31360a413c189093" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.901686 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.902734 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.938336 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.953749 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.954237 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.955601 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.974652 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.978628 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 19:51:52 crc kubenswrapper[4916]: I1203 19:51:52.984692 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:52.998169 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:51:53 crc kubenswrapper[4916]: E1203 19:51:52.998774 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e34d9c63-d03a-453c-997e-1e47baa58589" containerName="nova-metadata-log" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:52.998795 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e34d9c63-d03a-453c-997e-1e47baa58589" containerName="nova-metadata-log" Dec 03 19:51:53 crc kubenswrapper[4916]: E1203 19:51:52.998814 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59d15586-9ea1-4a06-b563-4acac206caeb" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:52.998825 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="59d15586-9ea1-4a06-b563-4acac206caeb" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 19:51:53 crc kubenswrapper[4916]: E1203 19:51:52.998858 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e34d9c63-d03a-453c-997e-1e47baa58589" containerName="nova-metadata-metadata" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:52.998868 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e34d9c63-d03a-453c-997e-1e47baa58589" containerName="nova-metadata-metadata" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:52.999128 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="e34d9c63-d03a-453c-997e-1e47baa58589" containerName="nova-metadata-metadata" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:52.999170 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="59d15586-9ea1-4a06-b563-4acac206caeb" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:52.999197 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="e34d9c63-d03a-453c-997e-1e47baa58589" containerName="nova-metadata-log" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.000723 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.003511 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.004301 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.008604 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.010848 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.013673 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.014434 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.014644 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.032345 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.047951 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.087546 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.113850 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-config-data\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.113907 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.113952 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/721211b9-a2ae-4a64-bc3c-f243277baf71-logs\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.114031 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptnpm\" (UniqueName: \"kubernetes.io/projected/afa6fb7e-4053-4afb-89d5-2bce4d35c456-kube-api-access-ptnpm\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.114061 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.114091 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.114111 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.114128 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt7vs\" (UniqueName: \"kubernetes.io/projected/721211b9-a2ae-4a64-bc3c-f243277baf71-kube-api-access-jt7vs\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.114159 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.114176 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.216193 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-config-data\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.216265 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.216308 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/721211b9-a2ae-4a64-bc3c-f243277baf71-logs\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.216362 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptnpm\" (UniqueName: \"kubernetes.io/projected/afa6fb7e-4053-4afb-89d5-2bce4d35c456-kube-api-access-ptnpm\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.216389 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.216416 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.216435 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.216451 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt7vs\" (UniqueName: \"kubernetes.io/projected/721211b9-a2ae-4a64-bc3c-f243277baf71-kube-api-access-jt7vs\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.216483 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.216498 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.218224 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/721211b9-a2ae-4a64-bc3c-f243277baf71-logs\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.220320 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.220340 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.221203 4916 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.221633 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.222025 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-config-data\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.223928 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afa6fb7e-4053-4afb-89d5-2bce4d35c456-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.232067 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.237203 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt7vs\" (UniqueName: \"kubernetes.io/projected/721211b9-a2ae-4a64-bc3c-f243277baf71-kube-api-access-jt7vs\") pod \"nova-metadata-0\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.237801 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptnpm\" (UniqueName: \"kubernetes.io/projected/afa6fb7e-4053-4afb-89d5-2bce4d35c456-kube-api-access-ptnpm\") pod \"nova-cell1-novncproxy-0\" (UID: \"afa6fb7e-4053-4afb-89d5-2bce4d35c456\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.354629 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.379611 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.847761 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.918440 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"721211b9-a2ae-4a64-bc3c-f243277baf71","Type":"ContainerStarted","Data":"a11184523cf07db5ff24a0700dd5ee7bb0587b0be38585c849acdb72d5eb4581"} Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.922920 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.926999 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 19:51:53 crc kubenswrapper[4916]: I1203 19:51:53.928670 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.138712 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"] Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.147140 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.240024 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"] Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.344503 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.344600 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.344643 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-svc\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.344664 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgr9d\" (UniqueName: \"kubernetes.io/projected/646d792d-4828-48b7-955a-2108efeb5ebc-kube-api-access-wgr9d\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.344681 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-config\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:54 crc 
Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.446649 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"
Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.447162 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-svc\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"
Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.447230 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgr9d\" (UniqueName: \"kubernetes.io/projected/646d792d-4828-48b7-955a-2108efeb5ebc-kube-api-access-wgr9d\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"
Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.447260 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-config\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"
Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.448416 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-svc\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"
Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.448542 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-config\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"
Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.448710 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"
Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.449255 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"
Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.449329 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"
"MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.449887 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.450128 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.474391 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgr9d\" (UniqueName: \"kubernetes.io/projected/646d792d-4828-48b7-955a-2108efeb5ebc-kube-api-access-wgr9d\") pod \"dnsmasq-dns-6b7bbf7cf9-2drw5\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.490001 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59d15586-9ea1-4a06-b563-4acac206caeb" path="/var/lib/kubelet/pods/59d15586-9ea1-4a06-b563-4acac206caeb/volumes" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.490578 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e34d9c63-d03a-453c-997e-1e47baa58589" path="/var/lib/kubelet/pods/e34d9c63-d03a-453c-997e-1e47baa58589/volumes" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.568154 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.946180 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"721211b9-a2ae-4a64-bc3c-f243277baf71","Type":"ContainerStarted","Data":"fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e"} Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.946587 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"721211b9-a2ae-4a64-bc3c-f243277baf71","Type":"ContainerStarted","Data":"2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba"} Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.957307 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"afa6fb7e-4053-4afb-89d5-2bce4d35c456","Type":"ContainerStarted","Data":"107c640f62b885ec818562d3dde20bcee8b68c302dfaabf8e8ab65734ec481ed"} Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.957356 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"afa6fb7e-4053-4afb-89d5-2bce4d35c456","Type":"ContainerStarted","Data":"3fe6a1fdad148c774f1426c46f41a51fbd29e94d242ace47dd6e1eab6e6452de"} Dec 03 19:51:54 crc kubenswrapper[4916]: I1203 19:51:54.978900 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.978883357 podStartE2EDuration="2.978883357s" podCreationTimestamp="2025-12-03 19:51:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:51:54.9651572 +0000 UTC m=+1330.927967466" watchObservedRunningTime="2025-12-03 19:51:54.978883357 +0000 UTC m=+1330.941693623" Dec 03 19:51:55 crc kubenswrapper[4916]: I1203 19:51:55.050707 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.050687398 podStartE2EDuration="3.050687398s" podCreationTimestamp="2025-12-03 19:51:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:51:55.000590398 +0000 UTC m=+1330.963400674" watchObservedRunningTime="2025-12-03 19:51:55.050687398 +0000 UTC m=+1331.013497664" Dec 03 19:51:55 crc kubenswrapper[4916]: I1203 19:51:55.057803 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"] Dec 03 19:51:55 crc kubenswrapper[4916]: I1203 19:51:55.966862 4916 generic.go:334] "Generic (PLEG): container finished" podID="646d792d-4828-48b7-955a-2108efeb5ebc" containerID="7fd987c0f21cff9797a9b36b54de4c59cc99415a30ca78b04a004b25038f0294" exitCode=0 Dec 03 19:51:55 crc kubenswrapper[4916]: I1203 19:51:55.966918 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" event={"ID":"646d792d-4828-48b7-955a-2108efeb5ebc","Type":"ContainerDied","Data":"7fd987c0f21cff9797a9b36b54de4c59cc99415a30ca78b04a004b25038f0294"} Dec 03 19:51:55 crc kubenswrapper[4916]: I1203 19:51:55.967868 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" event={"ID":"646d792d-4828-48b7-955a-2108efeb5ebc","Type":"ContainerStarted","Data":"9ea00c5ead358d3005df525be168ec241c17e02cec34c6d0da6985eb3ef11412"} Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.491348 4916 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.694348 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.694774 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="ceilometer-central-agent" containerID="cri-o://af3886e679ea8c38486f9f4ef6fb5f5d41a612b305c8d06665dd453dc1afb881" gracePeriod=30 Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.694847 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="sg-core" containerID="cri-o://ffe7e191d3a8fca7a985b6c5e340f0d86b212eed157976a45de44cc8b46722bf" gracePeriod=30 Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.694899 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="ceilometer-notification-agent" containerID="cri-o://a4d8e30723a8ba08f02c84d971e22798ae185c4afa6fcf47d70e0c4fd7ef9adc" gracePeriod=30 Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.694967 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="proxy-httpd" containerID="cri-o://b4b41eb73ae277b0c1af88066043f8b93f41ce93945a46f8fe80be2f8d35339c" gracePeriod=30 Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.986001 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" event={"ID":"646d792d-4828-48b7-955a-2108efeb5ebc","Type":"ContainerStarted","Data":"3b5a058e96668f6b872e5a7e9fa56bb9461f0a2729095081a373414b52ab9699"} Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.986335 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.997469 4916 generic.go:334] "Generic (PLEG): container finished" podID="76d34189-453c-4b25-9331-97b406be5b6f" containerID="b4b41eb73ae277b0c1af88066043f8b93f41ce93945a46f8fe80be2f8d35339c" exitCode=0 Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.997498 4916 generic.go:334] "Generic (PLEG): container finished" podID="76d34189-453c-4b25-9331-97b406be5b6f" containerID="ffe7e191d3a8fca7a985b6c5e340f0d86b212eed157976a45de44cc8b46722bf" exitCode=2 Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.997555 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76d34189-453c-4b25-9331-97b406be5b6f","Type":"ContainerDied","Data":"b4b41eb73ae277b0c1af88066043f8b93f41ce93945a46f8fe80be2f8d35339c"} Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.997631 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76d34189-453c-4b25-9331-97b406be5b6f","Type":"ContainerDied","Data":"ffe7e191d3a8fca7a985b6c5e340f0d86b212eed157976a45de44cc8b46722bf"} Dec 03 19:51:56 crc kubenswrapper[4916]: I1203 19:51:56.997682 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" containerName="nova-api-log" containerID="cri-o://2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f" gracePeriod=30 Dec 03 
Dec 03 19:51:57 crc kubenswrapper[4916]: I1203 19:51:57.025398 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" podStartSLOduration=3.025378719 podStartE2EDuration="3.025378719s" podCreationTimestamp="2025-12-03 19:51:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:51:57.006172956 +0000 UTC m=+1332.968983222" watchObservedRunningTime="2025-12-03 19:51:57.025378719 +0000 UTC m=+1332.988188985"
Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.012217 4916 generic.go:334] "Generic (PLEG): container finished" podID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" containerID="2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f" exitCode=143
Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.012305 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4","Type":"ContainerDied","Data":"2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f"}
Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.015034 4916 generic.go:334] "Generic (PLEG): container finished" podID="76d34189-453c-4b25-9331-97b406be5b6f" containerID="a4d8e30723a8ba08f02c84d971e22798ae185c4afa6fcf47d70e0c4fd7ef9adc" exitCode=0
Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.015052 4916 generic.go:334] "Generic (PLEG): container finished" podID="76d34189-453c-4b25-9331-97b406be5b6f" containerID="af3886e679ea8c38486f9f4ef6fb5f5d41a612b305c8d06665dd453dc1afb881" exitCode=0
Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.016114 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76d34189-453c-4b25-9331-97b406be5b6f","Type":"ContainerDied","Data":"a4d8e30723a8ba08f02c84d971e22798ae185c4afa6fcf47d70e0c4fd7ef9adc"}
Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.016138 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76d34189-453c-4b25-9331-97b406be5b6f","Type":"ContainerDied","Data":"af3886e679ea8c38486f9f4ef6fb5f5d41a612b305c8d06665dd453dc1afb881"}
Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.342295 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.354863 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.355147 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.380025 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.451866 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-scripts\") pod \"76d34189-453c-4b25-9331-97b406be5b6f\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.452605 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-sg-core-conf-yaml\") pod \"76d34189-453c-4b25-9331-97b406be5b6f\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.452674 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbsc9\" (UniqueName: \"kubernetes.io/projected/76d34189-453c-4b25-9331-97b406be5b6f-kube-api-access-fbsc9\") pod \"76d34189-453c-4b25-9331-97b406be5b6f\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.452693 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-run-httpd\") pod \"76d34189-453c-4b25-9331-97b406be5b6f\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.452710 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-config-data\") pod \"76d34189-453c-4b25-9331-97b406be5b6f\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.452741 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-log-httpd\") pod \"76d34189-453c-4b25-9331-97b406be5b6f\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.452761 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-combined-ca-bundle\") pod \"76d34189-453c-4b25-9331-97b406be5b6f\" (UID: \"76d34189-453c-4b25-9331-97b406be5b6f\") " Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.453364 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "76d34189-453c-4b25-9331-97b406be5b6f" (UID: "76d34189-453c-4b25-9331-97b406be5b6f"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.454395 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "76d34189-453c-4b25-9331-97b406be5b6f" (UID: "76d34189-453c-4b25-9331-97b406be5b6f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.458085 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-scripts" (OuterVolumeSpecName: "scripts") pod "76d34189-453c-4b25-9331-97b406be5b6f" (UID: "76d34189-453c-4b25-9331-97b406be5b6f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.459135 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76d34189-453c-4b25-9331-97b406be5b6f-kube-api-access-fbsc9" (OuterVolumeSpecName: "kube-api-access-fbsc9") pod "76d34189-453c-4b25-9331-97b406be5b6f" (UID: "76d34189-453c-4b25-9331-97b406be5b6f"). InnerVolumeSpecName "kube-api-access-fbsc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.491382 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "76d34189-453c-4b25-9331-97b406be5b6f" (UID: "76d34189-453c-4b25-9331-97b406be5b6f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.547335 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76d34189-453c-4b25-9331-97b406be5b6f" (UID: "76d34189-453c-4b25-9331-97b406be5b6f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.556890 4916 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.556941 4916 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.556961 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbsc9\" (UniqueName: \"kubernetes.io/projected/76d34189-453c-4b25-9331-97b406be5b6f-kube-api-access-fbsc9\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.556982 4916 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/76d34189-453c-4b25-9331-97b406be5b6f-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.557001 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.557018 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.565354 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-config-data" (OuterVolumeSpecName: "config-data") pod "76d34189-453c-4b25-9331-97b406be5b6f" (UID: "76d34189-453c-4b25-9331-97b406be5b6f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:51:58 crc kubenswrapper[4916]: I1203 19:51:58.658853 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76d34189-453c-4b25-9331-97b406be5b6f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.048464 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"76d34189-453c-4b25-9331-97b406be5b6f","Type":"ContainerDied","Data":"27737130cd1b5cec8c0ad2de5aad56be0fa7f6552cc431d7c42cff4eaf06e156"} Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.048916 4916 scope.go:117] "RemoveContainer" containerID="b4b41eb73ae277b0c1af88066043f8b93f41ce93945a46f8fe80be2f8d35339c" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.048496 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.081405 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.089698 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.091845 4916 scope.go:117] "RemoveContainer" containerID="ffe7e191d3a8fca7a985b6c5e340f0d86b212eed157976a45de44cc8b46722bf" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.140841 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:59 crc kubenswrapper[4916]: E1203 19:51:59.141347 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="ceilometer-notification-agent" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.141363 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="ceilometer-notification-agent" Dec 03 19:51:59 crc kubenswrapper[4916]: E1203 19:51:59.141389 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="ceilometer-central-agent" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.141399 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="ceilometer-central-agent" Dec 03 19:51:59 crc kubenswrapper[4916]: E1203 19:51:59.141428 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="proxy-httpd" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.141438 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="proxy-httpd" Dec 03 19:51:59 crc kubenswrapper[4916]: E1203 19:51:59.141451 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="sg-core" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.141460 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="sg-core" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.141845 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="ceilometer-central-agent" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.141899 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="ceilometer-notification-agent" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.141925 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="proxy-httpd" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.141947 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="76d34189-453c-4b25-9331-97b406be5b6f" containerName="sg-core" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.145098 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.145207 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.157558 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.157850 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.167941 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-run-httpd\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.167975 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-log-httpd\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.168001 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbp6k\" (UniqueName: \"kubernetes.io/projected/b4cae38b-15b3-49c5-933f-ef683a1b2d99-kube-api-access-zbp6k\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.168019 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-config-data\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.168104 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.168124 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-scripts\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.168375 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.202297 4916 scope.go:117] "RemoveContainer" containerID="a4d8e30723a8ba08f02c84d971e22798ae185c4afa6fcf47d70e0c4fd7ef9adc" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.223682 4916 scope.go:117] "RemoveContainer" containerID="af3886e679ea8c38486f9f4ef6fb5f5d41a612b305c8d06665dd453dc1afb881" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.269865 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.269982 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-run-httpd\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.270011 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-log-httpd\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.270048 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbp6k\" (UniqueName: \"kubernetes.io/projected/b4cae38b-15b3-49c5-933f-ef683a1b2d99-kube-api-access-zbp6k\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.270075 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-config-data\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.270120 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.270149 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-scripts\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.270672 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-run-httpd\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.271389 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-log-httpd\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.275203 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-scripts\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.275529 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-config-data\") pod 
\"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.276101 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.277776 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.286015 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbp6k\" (UniqueName: \"kubernetes.io/projected/b4cae38b-15b3-49c5-933f-ef683a1b2d99-kube-api-access-zbp6k\") pod \"ceilometer-0\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.484199 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:51:59 crc kubenswrapper[4916]: I1203 19:51:59.744964 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.060718 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4cae38b-15b3-49c5-933f-ef683a1b2d99","Type":"ContainerStarted","Data":"739330fab195a175ebffe6d9b42bee8df1401fee5556d79fa1256f356af2bc38"} Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.490415 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76d34189-453c-4b25-9331-97b406be5b6f" path="/var/lib/kubelet/pods/76d34189-453c-4b25-9331-97b406be5b6f/volumes" Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.595917 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.811702 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6rwc\" (UniqueName: \"kubernetes.io/projected/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-kube-api-access-k6rwc\") pod \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.812007 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-logs\") pod \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.812036 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-config-data\") pod \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.812065 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-combined-ca-bundle\") pod \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\" (UID: \"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4\") " Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.812675 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-logs" (OuterVolumeSpecName: "logs") pod "5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" (UID: "5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.816252 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-kube-api-access-k6rwc" (OuterVolumeSpecName: "kube-api-access-k6rwc") pod "5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" (UID: "5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4"). InnerVolumeSpecName "kube-api-access-k6rwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.845627 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-config-data" (OuterVolumeSpecName: "config-data") pod "5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" (UID: "5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.855762 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" (UID: "5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.913775 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-logs\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.913805 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.913814 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:00 crc kubenswrapper[4916]: I1203 19:52:00.913825 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6rwc\" (UniqueName: \"kubernetes.io/projected/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4-kube-api-access-k6rwc\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.071648 4916 generic.go:334] "Generic (PLEG): container finished" podID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" containerID="4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac" exitCode=0 Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.071712 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4","Type":"ContainerDied","Data":"4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac"} Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.071737 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4","Type":"ContainerDied","Data":"153e0c2ad8bfc57166b50c1063fe328c8f2056e5fbbc50da95ebb7298f841d3e"} Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.071744 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.071752 4916 scope.go:117] "RemoveContainer" containerID="4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.074228 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4cae38b-15b3-49c5-933f-ef683a1b2d99","Type":"ContainerStarted","Data":"539d9d5138bea595449b7c9d9da4ac6dd47681eff3b12ba75a5e6559c484ee46"} Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.101009 4916 scope.go:117] "RemoveContainer" containerID="2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.102011 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.121676 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.123725 4916 scope.go:117] "RemoveContainer" containerID="4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac" Dec 03 19:52:01 crc kubenswrapper[4916]: E1203 19:52:01.124152 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac\": container with ID starting with 4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac not found: ID does not exist" containerID="4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.124189 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac"} err="failed to get container status \"4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac\": rpc error: code = NotFound desc = could not find container \"4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac\": container with ID starting with 4dd29588ebf50a351764ef501196e9b36c147d0211d5052a7b5de2397da30dac not found: ID does not exist" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.124208 4916 scope.go:117] "RemoveContainer" containerID="2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f" Dec 03 19:52:01 crc kubenswrapper[4916]: E1203 19:52:01.124493 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f\": container with ID starting with 2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f not found: ID does not exist" containerID="2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.124533 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f"} err="failed to get container status \"2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f\": rpc error: code = NotFound desc = could not find container \"2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f\": container with ID starting with 2bb30aebeec46dc140a87b2cb43960d2634767d674e9533492f9b12473f9333f not found: ID does not exist" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.142632 4916 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 19:52:01 crc kubenswrapper[4916]: E1203 19:52:01.143141 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" containerName="nova-api-log" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.143161 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" containerName="nova-api-log" Dec 03 19:52:01 crc kubenswrapper[4916]: E1203 19:52:01.143180 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" containerName="nova-api-api" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.143188 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" containerName="nova-api-api" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.143450 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" containerName="nova-api-log" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.143476 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" containerName="nova-api-api" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.144702 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.149553 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.149861 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.150438 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.158792 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.219412 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.219470 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-public-tls-certs\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.219493 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-config-data\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.219575 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " 
pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.219642 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgj7l\" (UniqueName: \"kubernetes.io/projected/1ea731d4-086e-4b79-83c4-d7179c941d04-kube-api-access-fgj7l\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.219686 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ea731d4-086e-4b79-83c4-d7179c941d04-logs\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.320876 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgj7l\" (UniqueName: \"kubernetes.io/projected/1ea731d4-086e-4b79-83c4-d7179c941d04-kube-api-access-fgj7l\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.320945 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ea731d4-086e-4b79-83c4-d7179c941d04-logs\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.321073 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.321102 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-public-tls-certs\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.321122 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-config-data\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.321158 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.321681 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ea731d4-086e-4b79-83c4-d7179c941d04-logs\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.325195 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" 
Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.325518 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-internal-tls-certs\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.326255 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-config-data\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.326593 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-public-tls-certs\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.337199 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgj7l\" (UniqueName: \"kubernetes.io/projected/1ea731d4-086e-4b79-83c4-d7179c941d04-kube-api-access-fgj7l\") pod \"nova-api-0\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.471304 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:52:01 crc kubenswrapper[4916]: I1203 19:52:01.980538 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:52:01 crc kubenswrapper[4916]: W1203 19:52:01.990727 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ea731d4_086e_4b79_83c4_d7179c941d04.slice/crio-c461009d65682cf48d5878c401ffe0552e3b8ffe9c1a11b2570037acc40f821e WatchSource:0}: Error finding container c461009d65682cf48d5878c401ffe0552e3b8ffe9c1a11b2570037acc40f821e: Status 404 returned error can't find the container with id c461009d65682cf48d5878c401ffe0552e3b8ffe9c1a11b2570037acc40f821e Dec 03 19:52:02 crc kubenswrapper[4916]: E1203 19:52:02.017386 4916 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode34d9c63_d03a_453c_997e_1e47baa58589.slice/crio-conmon-d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0.scope\": RecentStats: unable to find data in memory cache]" Dec 03 19:52:02 crc kubenswrapper[4916]: I1203 19:52:02.100533 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4cae38b-15b3-49c5-933f-ef683a1b2d99","Type":"ContainerStarted","Data":"5282e42e7fb601656bdd00b5a01d046b835b137c0ced165c5fcda394a26ec979"} Dec 03 19:52:02 crc kubenswrapper[4916]: I1203 19:52:02.101969 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1ea731d4-086e-4b79-83c4-d7179c941d04","Type":"ContainerStarted","Data":"c461009d65682cf48d5878c401ffe0552e3b8ffe9c1a11b2570037acc40f821e"} Dec 03 19:52:02 crc kubenswrapper[4916]: I1203 19:52:02.492399 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4" path="/var/lib/kubelet/pods/5773bd1f-36d2-4ddf-bb1b-da64d61a4fe4/volumes" Dec 03 19:52:02 crc kubenswrapper[4916]: 
I1203 19:52:02.566064 4916 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podded59053-6b2a-4d20-bd49-1a444e35ad2f"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podded59053-6b2a-4d20-bd49-1a444e35ad2f] : Timed out while waiting for systemd to remove kubepods-besteffort-podded59053_6b2a_4d20_bd49_1a444e35ad2f.slice" Dec 03 19:52:03 crc kubenswrapper[4916]: I1203 19:52:03.117112 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4cae38b-15b3-49c5-933f-ef683a1b2d99","Type":"ContainerStarted","Data":"7a17df25de48e60e8e28349bef821cfb6ee717ebac574c9c6f2647879af6b326"} Dec 03 19:52:03 crc kubenswrapper[4916]: I1203 19:52:03.119249 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1ea731d4-086e-4b79-83c4-d7179c941d04","Type":"ContainerStarted","Data":"a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6"} Dec 03 19:52:03 crc kubenswrapper[4916]: I1203 19:52:03.119309 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1ea731d4-086e-4b79-83c4-d7179c941d04","Type":"ContainerStarted","Data":"af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36"} Dec 03 19:52:03 crc kubenswrapper[4916]: I1203 19:52:03.167768 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.16774233 podStartE2EDuration="2.16774233s" podCreationTimestamp="2025-12-03 19:52:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:52:03.152948934 +0000 UTC m=+1339.115759240" watchObservedRunningTime="2025-12-03 19:52:03.16774233 +0000 UTC m=+1339.130552606" Dec 03 19:52:03 crc kubenswrapper[4916]: I1203 19:52:03.355675 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 19:52:03 crc kubenswrapper[4916]: I1203 19:52:03.355723 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 19:52:03 crc kubenswrapper[4916]: I1203 19:52:03.379873 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:52:03 crc kubenswrapper[4916]: I1203 19:52:03.405909 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.131886 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4cae38b-15b3-49c5-933f-ef683a1b2d99","Type":"ContainerStarted","Data":"26f91255685944b93a16c424a56b1099e2d8f7f7376b5f056e52388f93f4a2e1"} Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.132499 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.161340 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.174227 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.253790639 podStartE2EDuration="5.174201897s" podCreationTimestamp="2025-12-03 19:51:59 +0000 UTC" firstStartedPulling="2025-12-03 19:51:59.747207091 +0000 UTC m=+1335.710017367" 
lastFinishedPulling="2025-12-03 19:52:03.667618359 +0000 UTC m=+1339.630428625" observedRunningTime="2025-12-03 19:52:04.167898268 +0000 UTC m=+1340.130708554" watchObservedRunningTime="2025-12-03 19:52:04.174201897 +0000 UTC m=+1340.137012173" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.336291 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-5t7sm"] Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.337603 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.339891 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.340912 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.365064 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-5t7sm"] Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.366852 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="721211b9-a2ae-4a64-bc3c-f243277baf71" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.366990 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="721211b9-a2ae-4a64-bc3c-f243277baf71" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.479584 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-config-data\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.479644 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-scripts\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.479731 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.479764 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dtzg\" (UniqueName: \"kubernetes.io/projected/80531945-5ede-449d-9903-5fe49857e211-kube-api-access-8dtzg\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.569774 4916 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.582422 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dtzg\" (UniqueName: \"kubernetes.io/projected/80531945-5ede-449d-9903-5fe49857e211-kube-api-access-8dtzg\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.582528 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-config-data\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.582590 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-scripts\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.582653 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.589790 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.596865 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-config-data\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.597169 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-scripts\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.602785 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dtzg\" (UniqueName: \"kubernetes.io/projected/80531945-5ede-449d-9903-5fe49857e211-kube-api-access-8dtzg\") pod \"nova-cell1-cell-mapping-5t7sm\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.662038 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9b86998b5-j4z8k"] Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.662264 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" 
podUID="c1ba18bf-6eb8-445c-b03c-46c867d80430" containerName="dnsmasq-dns" containerID="cri-o://897a04a7dec9589a3e29cd25e43cf4d06c573332914d31ade95414f1996cd25b" gracePeriod=10 Dec 03 19:52:04 crc kubenswrapper[4916]: I1203 19:52:04.671526 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.145422 4916 generic.go:334] "Generic (PLEG): container finished" podID="c1ba18bf-6eb8-445c-b03c-46c867d80430" containerID="897a04a7dec9589a3e29cd25e43cf4d06c573332914d31ade95414f1996cd25b" exitCode=0 Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.145685 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" event={"ID":"c1ba18bf-6eb8-445c-b03c-46c867d80430","Type":"ContainerDied","Data":"897a04a7dec9589a3e29cd25e43cf4d06c573332914d31ade95414f1996cd25b"} Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.212039 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.214185 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-5t7sm"] Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.396737 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-nb\") pod \"c1ba18bf-6eb8-445c-b03c-46c867d80430\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.397222 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-config\") pod \"c1ba18bf-6eb8-445c-b03c-46c867d80430\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.397286 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-swift-storage-0\") pod \"c1ba18bf-6eb8-445c-b03c-46c867d80430\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.397328 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4p8s\" (UniqueName: \"kubernetes.io/projected/c1ba18bf-6eb8-445c-b03c-46c867d80430-kube-api-access-r4p8s\") pod \"c1ba18bf-6eb8-445c-b03c-46c867d80430\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.397401 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-svc\") pod \"c1ba18bf-6eb8-445c-b03c-46c867d80430\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.397516 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-sb\") pod \"c1ba18bf-6eb8-445c-b03c-46c867d80430\" (UID: \"c1ba18bf-6eb8-445c-b03c-46c867d80430\") " Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.405749 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/c1ba18bf-6eb8-445c-b03c-46c867d80430-kube-api-access-r4p8s" (OuterVolumeSpecName: "kube-api-access-r4p8s") pod "c1ba18bf-6eb8-445c-b03c-46c867d80430" (UID: "c1ba18bf-6eb8-445c-b03c-46c867d80430"). InnerVolumeSpecName "kube-api-access-r4p8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.448092 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-config" (OuterVolumeSpecName: "config") pod "c1ba18bf-6eb8-445c-b03c-46c867d80430" (UID: "c1ba18bf-6eb8-445c-b03c-46c867d80430"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.449425 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c1ba18bf-6eb8-445c-b03c-46c867d80430" (UID: "c1ba18bf-6eb8-445c-b03c-46c867d80430"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.449944 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c1ba18bf-6eb8-445c-b03c-46c867d80430" (UID: "c1ba18bf-6eb8-445c-b03c-46c867d80430"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.450469 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c1ba18bf-6eb8-445c-b03c-46c867d80430" (UID: "c1ba18bf-6eb8-445c-b03c-46c867d80430"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.468945 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c1ba18bf-6eb8-445c-b03c-46c867d80430" (UID: "c1ba18bf-6eb8-445c-b03c-46c867d80430"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.499185 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.499212 4916 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.499223 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4p8s\" (UniqueName: \"kubernetes.io/projected/c1ba18bf-6eb8-445c-b03c-46c867d80430-kube-api-access-r4p8s\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.499231 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.499240 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:05 crc kubenswrapper[4916]: I1203 19:52:05.499248 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c1ba18bf-6eb8-445c-b03c-46c867d80430-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:06 crc kubenswrapper[4916]: I1203 19:52:06.162893 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5t7sm" event={"ID":"80531945-5ede-449d-9903-5fe49857e211","Type":"ContainerStarted","Data":"e10f7549a490272f9fc7f8c7d38d07e96c3a775215e6ea8ef3cb304b4f5cb099"} Dec 03 19:52:06 crc kubenswrapper[4916]: I1203 19:52:06.163357 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5t7sm" event={"ID":"80531945-5ede-449d-9903-5fe49857e211","Type":"ContainerStarted","Data":"236e7e273f55fa8aced35047745f521cdefa442ae70fa0de0841044c83790c6e"} Dec 03 19:52:06 crc kubenswrapper[4916]: I1203 19:52:06.169207 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" event={"ID":"c1ba18bf-6eb8-445c-b03c-46c867d80430","Type":"ContainerDied","Data":"ebe262875e3c26748ec8689f5146c9963b6fe94bef26a15e7cffee1e17957339"} Dec 03 19:52:06 crc kubenswrapper[4916]: I1203 19:52:06.169278 4916 scope.go:117] "RemoveContainer" containerID="897a04a7dec9589a3e29cd25e43cf4d06c573332914d31ade95414f1996cd25b" Dec 03 19:52:06 crc kubenswrapper[4916]: I1203 19:52:06.169518 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-9b86998b5-j4z8k" Dec 03 19:52:06 crc kubenswrapper[4916]: I1203 19:52:06.183723 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-5t7sm" podStartSLOduration=2.183699948 podStartE2EDuration="2.183699948s" podCreationTimestamp="2025-12-03 19:52:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:52:06.17929052 +0000 UTC m=+1342.142100816" watchObservedRunningTime="2025-12-03 19:52:06.183699948 +0000 UTC m=+1342.146510244" Dec 03 19:52:06 crc kubenswrapper[4916]: I1203 19:52:06.199191 4916 scope.go:117] "RemoveContainer" containerID="94204763d6de76050c8b7d0c42aa74ff2688607265a1d16eb3c8a09c9c4ba063" Dec 03 19:52:06 crc kubenswrapper[4916]: I1203 19:52:06.245429 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-9b86998b5-j4z8k"] Dec 03 19:52:06 crc kubenswrapper[4916]: I1203 19:52:06.260149 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-9b86998b5-j4z8k"] Dec 03 19:52:06 crc kubenswrapper[4916]: I1203 19:52:06.492432 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1ba18bf-6eb8-445c-b03c-46c867d80430" path="/var/lib/kubelet/pods/c1ba18bf-6eb8-445c-b03c-46c867d80430/volumes" Dec 03 19:52:10 crc kubenswrapper[4916]: I1203 19:52:10.229448 4916 generic.go:334] "Generic (PLEG): container finished" podID="80531945-5ede-449d-9903-5fe49857e211" containerID="e10f7549a490272f9fc7f8c7d38d07e96c3a775215e6ea8ef3cb304b4f5cb099" exitCode=0 Dec 03 19:52:10 crc kubenswrapper[4916]: I1203 19:52:10.229535 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5t7sm" event={"ID":"80531945-5ede-449d-9903-5fe49857e211","Type":"ContainerDied","Data":"e10f7549a490272f9fc7f8c7d38d07e96c3a775215e6ea8ef3cb304b4f5cb099"} Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.472546 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.473007 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.701235 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.833197 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-scripts\") pod \"80531945-5ede-449d-9903-5fe49857e211\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.833442 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dtzg\" (UniqueName: \"kubernetes.io/projected/80531945-5ede-449d-9903-5fe49857e211-kube-api-access-8dtzg\") pod \"80531945-5ede-449d-9903-5fe49857e211\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.833537 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-config-data\") pod \"80531945-5ede-449d-9903-5fe49857e211\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.833556 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-combined-ca-bundle\") pod \"80531945-5ede-449d-9903-5fe49857e211\" (UID: \"80531945-5ede-449d-9903-5fe49857e211\") " Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.839681 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-scripts" (OuterVolumeSpecName: "scripts") pod "80531945-5ede-449d-9903-5fe49857e211" (UID: "80531945-5ede-449d-9903-5fe49857e211"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.841196 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80531945-5ede-449d-9903-5fe49857e211-kube-api-access-8dtzg" (OuterVolumeSpecName: "kube-api-access-8dtzg") pod "80531945-5ede-449d-9903-5fe49857e211" (UID: "80531945-5ede-449d-9903-5fe49857e211"). InnerVolumeSpecName "kube-api-access-8dtzg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.862669 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-config-data" (OuterVolumeSpecName: "config-data") pod "80531945-5ede-449d-9903-5fe49857e211" (UID: "80531945-5ede-449d-9903-5fe49857e211"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.888330 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80531945-5ede-449d-9903-5fe49857e211" (UID: "80531945-5ede-449d-9903-5fe49857e211"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.935936 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.935981 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dtzg\" (UniqueName: \"kubernetes.io/projected/80531945-5ede-449d-9903-5fe49857e211-kube-api-access-8dtzg\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.935995 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:11 crc kubenswrapper[4916]: I1203 19:52:11.936008 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80531945-5ede-449d-9903-5fe49857e211-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.257834 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-5t7sm" event={"ID":"80531945-5ede-449d-9903-5fe49857e211","Type":"ContainerDied","Data":"236e7e273f55fa8aced35047745f521cdefa442ae70fa0de0841044c83790c6e"} Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.257909 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="236e7e273f55fa8aced35047745f521cdefa442ae70fa0de0841044c83790c6e" Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.257920 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-5t7sm" Dec 03 19:52:12 crc kubenswrapper[4916]: E1203 19:52:12.296048 4916 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode34d9c63_d03a_453c_997e_1e47baa58589.slice/crio-conmon-d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0.scope\": RecentStats: unable to find data in memory cache]" Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.440078 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.440587 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerName="nova-api-log" containerID="cri-o://af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36" gracePeriod=30 Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.440677 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerName="nova-api-api" containerID="cri-o://a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6" gracePeriod=30 Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.448516 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.205:8774/\": EOF" Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.465937 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.205:8774/\": EOF" Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.467483 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.467693 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="1f124358-6ae2-4378-80db-d30606496eca" containerName="nova-scheduler-scheduler" containerID="cri-o://12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559" gracePeriod=30 Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.537283 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.537578 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="721211b9-a2ae-4a64-bc3c-f243277baf71" containerName="nova-metadata-log" containerID="cri-o://2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba" gracePeriod=30 Dec 03 19:52:12 crc kubenswrapper[4916]: I1203 19:52:12.537878 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="721211b9-a2ae-4a64-bc3c-f243277baf71" containerName="nova-metadata-metadata" containerID="cri-o://fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e" gracePeriod=30 Dec 03 19:52:13 crc kubenswrapper[4916]: I1203 19:52:13.275706 4916 generic.go:334] "Generic (PLEG): container finished" podID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerID="af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36" exitCode=143 Dec 03 19:52:13 crc kubenswrapper[4916]: I1203 19:52:13.275797 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1ea731d4-086e-4b79-83c4-d7179c941d04","Type":"ContainerDied","Data":"af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36"} Dec 03 19:52:13 crc kubenswrapper[4916]: I1203 19:52:13.278387 4916 generic.go:334] "Generic (PLEG): container finished" podID="721211b9-a2ae-4a64-bc3c-f243277baf71" containerID="2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba" exitCode=143 Dec 03 19:52:13 crc kubenswrapper[4916]: I1203 19:52:13.278467 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"721211b9-a2ae-4a64-bc3c-f243277baf71","Type":"ContainerDied","Data":"2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba"} Dec 03 19:52:13 crc kubenswrapper[4916]: E1203 19:52:13.976602 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 19:52:13 crc kubenswrapper[4916]: E1203 19:52:13.978417 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 19:52:13 crc kubenswrapper[4916]: E1203 19:52:13.979897 4916 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code 
= Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 19:52:13 crc kubenswrapper[4916]: E1203 19:52:13.979964 4916 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="1f124358-6ae2-4378-80db-d30606496eca" containerName="nova-scheduler-scheduler" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.221556 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.319669 4916 generic.go:334] "Generic (PLEG): container finished" podID="721211b9-a2ae-4a64-bc3c-f243277baf71" containerID="fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e" exitCode=0 Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.319738 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"721211b9-a2ae-4a64-bc3c-f243277baf71","Type":"ContainerDied","Data":"fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e"} Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.319769 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"721211b9-a2ae-4a64-bc3c-f243277baf71","Type":"ContainerDied","Data":"a11184523cf07db5ff24a0700dd5ee7bb0587b0be38585c849acdb72d5eb4581"} Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.319788 4916 scope.go:117] "RemoveContainer" containerID="fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.319835 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.327705 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/721211b9-a2ae-4a64-bc3c-f243277baf71-logs\") pod \"721211b9-a2ae-4a64-bc3c-f243277baf71\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.327766 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jt7vs\" (UniqueName: \"kubernetes.io/projected/721211b9-a2ae-4a64-bc3c-f243277baf71-kube-api-access-jt7vs\") pod \"721211b9-a2ae-4a64-bc3c-f243277baf71\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.327828 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-nova-metadata-tls-certs\") pod \"721211b9-a2ae-4a64-bc3c-f243277baf71\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.327876 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-combined-ca-bundle\") pod \"721211b9-a2ae-4a64-bc3c-f243277baf71\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.328002 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-config-data\") pod \"721211b9-a2ae-4a64-bc3c-f243277baf71\" (UID: \"721211b9-a2ae-4a64-bc3c-f243277baf71\") " Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.328386 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/721211b9-a2ae-4a64-bc3c-f243277baf71-logs" (OuterVolumeSpecName: "logs") pod "721211b9-a2ae-4a64-bc3c-f243277baf71" (UID: "721211b9-a2ae-4a64-bc3c-f243277baf71"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.328728 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/721211b9-a2ae-4a64-bc3c-f243277baf71-logs\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.341969 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/721211b9-a2ae-4a64-bc3c-f243277baf71-kube-api-access-jt7vs" (OuterVolumeSpecName: "kube-api-access-jt7vs") pod "721211b9-a2ae-4a64-bc3c-f243277baf71" (UID: "721211b9-a2ae-4a64-bc3c-f243277baf71"). InnerVolumeSpecName "kube-api-access-jt7vs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.365506 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-config-data" (OuterVolumeSpecName: "config-data") pod "721211b9-a2ae-4a64-bc3c-f243277baf71" (UID: "721211b9-a2ae-4a64-bc3c-f243277baf71"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.372083 4916 scope.go:117] "RemoveContainer" containerID="2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.387340 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "721211b9-a2ae-4a64-bc3c-f243277baf71" (UID: "721211b9-a2ae-4a64-bc3c-f243277baf71"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.387918 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "721211b9-a2ae-4a64-bc3c-f243277baf71" (UID: "721211b9-a2ae-4a64-bc3c-f243277baf71"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.430295 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.430328 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jt7vs\" (UniqueName: \"kubernetes.io/projected/721211b9-a2ae-4a64-bc3c-f243277baf71-kube-api-access-jt7vs\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.430338 4916 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.430348 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/721211b9-a2ae-4a64-bc3c-f243277baf71-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.494030 4916 scope.go:117] "RemoveContainer" containerID="fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e" Dec 03 19:52:16 crc kubenswrapper[4916]: E1203 19:52:16.494492 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e\": container with ID starting with fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e not found: ID does not exist" containerID="fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.494544 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e"} err="failed to get container status \"fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e\": rpc error: code = NotFound desc = could not find container \"fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e\": container with ID starting with fac963447944b3a8a27523307f35983af3707e491f1fafb8a1c137bea3fd111e not found: ID does not exist" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.494630 4916 
scope.go:117] "RemoveContainer" containerID="2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba" Dec 03 19:52:16 crc kubenswrapper[4916]: E1203 19:52:16.494977 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba\": container with ID starting with 2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba not found: ID does not exist" containerID="2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.495039 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba"} err="failed to get container status \"2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba\": rpc error: code = NotFound desc = could not find container \"2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba\": container with ID starting with 2c5258ddc349da44cd6a093a63c7f321c3c4cc0951c641d7b6a1b4180327a3ba not found: ID does not exist" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.656936 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.678633 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.695217 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:52:16 crc kubenswrapper[4916]: E1203 19:52:16.695910 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ba18bf-6eb8-445c-b03c-46c867d80430" containerName="init" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.695941 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ba18bf-6eb8-445c-b03c-46c867d80430" containerName="init" Dec 03 19:52:16 crc kubenswrapper[4916]: E1203 19:52:16.695965 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80531945-5ede-449d-9903-5fe49857e211" containerName="nova-manage" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.695977 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="80531945-5ede-449d-9903-5fe49857e211" containerName="nova-manage" Dec 03 19:52:16 crc kubenswrapper[4916]: E1203 19:52:16.696000 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ba18bf-6eb8-445c-b03c-46c867d80430" containerName="dnsmasq-dns" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.696014 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ba18bf-6eb8-445c-b03c-46c867d80430" containerName="dnsmasq-dns" Dec 03 19:52:16 crc kubenswrapper[4916]: E1203 19:52:16.696049 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="721211b9-a2ae-4a64-bc3c-f243277baf71" containerName="nova-metadata-log" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.696061 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="721211b9-a2ae-4a64-bc3c-f243277baf71" containerName="nova-metadata-log" Dec 03 19:52:16 crc kubenswrapper[4916]: E1203 19:52:16.696080 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="721211b9-a2ae-4a64-bc3c-f243277baf71" containerName="nova-metadata-metadata" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.696092 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="721211b9-a2ae-4a64-bc3c-f243277baf71" 
containerName="nova-metadata-metadata" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.696462 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="721211b9-a2ae-4a64-bc3c-f243277baf71" containerName="nova-metadata-metadata" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.696493 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1ba18bf-6eb8-445c-b03c-46c867d80430" containerName="dnsmasq-dns" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.696519 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="80531945-5ede-449d-9903-5fe49857e211" containerName="nova-manage" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.696537 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="721211b9-a2ae-4a64-bc3c-f243277baf71" containerName="nova-metadata-log" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.698428 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.701699 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.702088 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.709902 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.839782 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcjj5\" (UniqueName: \"kubernetes.io/projected/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-kube-api-access-gcjj5\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.839894 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-logs\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.840484 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.840782 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-config-data\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.840886 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.942719 4916 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-logs\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.942789 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.942882 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-config-data\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.942934 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.942976 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcjj5\" (UniqueName: \"kubernetes.io/projected/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-kube-api-access-gcjj5\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.943625 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-logs\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.950274 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.950288 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.950970 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-config-data\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:16 crc kubenswrapper[4916]: I1203 19:52:16.962109 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcjj5\" (UniqueName: \"kubernetes.io/projected/d3baf082-dd08-4c10-aac9-8ce2874aa2ae-kube-api-access-gcjj5\") pod \"nova-metadata-0\" (UID: \"d3baf082-dd08-4c10-aac9-8ce2874aa2ae\") " pod="openstack/nova-metadata-0" Dec 03 19:52:17 crc kubenswrapper[4916]: I1203 
19:52:17.021650 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 19:52:17 crc kubenswrapper[4916]: W1203 19:52:17.515479 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3baf082_dd08_4c10_aac9_8ce2874aa2ae.slice/crio-03ecd079dc354518e6a5ade12a38074302f8295145a3489ce7bcb342225ab499 WatchSource:0}: Error finding container 03ecd079dc354518e6a5ade12a38074302f8295145a3489ce7bcb342225ab499: Status 404 returned error can't find the container with id 03ecd079dc354518e6a5ade12a38074302f8295145a3489ce7bcb342225ab499 Dec 03 19:52:17 crc kubenswrapper[4916]: I1203 19:52:17.515831 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 19:52:17 crc kubenswrapper[4916]: I1203 19:52:17.933550 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.064940 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-combined-ca-bundle\") pod \"1f124358-6ae2-4378-80db-d30606496eca\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.065012 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k99r4\" (UniqueName: \"kubernetes.io/projected/1f124358-6ae2-4378-80db-d30606496eca-kube-api-access-k99r4\") pod \"1f124358-6ae2-4378-80db-d30606496eca\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.065047 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-config-data\") pod \"1f124358-6ae2-4378-80db-d30606496eca\" (UID: \"1f124358-6ae2-4378-80db-d30606496eca\") " Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.072557 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f124358-6ae2-4378-80db-d30606496eca-kube-api-access-k99r4" (OuterVolumeSpecName: "kube-api-access-k99r4") pod "1f124358-6ae2-4378-80db-d30606496eca" (UID: "1f124358-6ae2-4378-80db-d30606496eca"). InnerVolumeSpecName "kube-api-access-k99r4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.097141 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-config-data" (OuterVolumeSpecName: "config-data") pod "1f124358-6ae2-4378-80db-d30606496eca" (UID: "1f124358-6ae2-4378-80db-d30606496eca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.112196 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1f124358-6ae2-4378-80db-d30606496eca" (UID: "1f124358-6ae2-4378-80db-d30606496eca"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.169105 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.169133 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k99r4\" (UniqueName: \"kubernetes.io/projected/1f124358-6ae2-4378-80db-d30606496eca-kube-api-access-k99r4\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.169143 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f124358-6ae2-4378-80db-d30606496eca-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.259895 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.341848 4916 generic.go:334] "Generic (PLEG): container finished" podID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerID="a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6" exitCode=0 Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.341972 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.341970 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1ea731d4-086e-4b79-83c4-d7179c941d04","Type":"ContainerDied","Data":"a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6"} Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.342082 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"1ea731d4-086e-4b79-83c4-d7179c941d04","Type":"ContainerDied","Data":"c461009d65682cf48d5878c401ffe0552e3b8ffe9c1a11b2570037acc40f821e"} Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.342101 4916 scope.go:117] "RemoveContainer" containerID="a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.344243 4916 generic.go:334] "Generic (PLEG): container finished" podID="1f124358-6ae2-4378-80db-d30606496eca" containerID="12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559" exitCode=0 Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.344326 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1f124358-6ae2-4378-80db-d30606496eca","Type":"ContainerDied","Data":"12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559"} Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.344354 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1f124358-6ae2-4378-80db-d30606496eca","Type":"ContainerDied","Data":"f0217d84ac18ac7cfc7ba81373791eb8e4a086a445c92ecbbc82cc34836a996a"} Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.344408 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.349148 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3baf082-dd08-4c10-aac9-8ce2874aa2ae","Type":"ContainerStarted","Data":"44f1fd6844647109bd1b0c4fe92fa6de4f2720435e966ffc961cfdfcdaf1c67d"} Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.349187 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3baf082-dd08-4c10-aac9-8ce2874aa2ae","Type":"ContainerStarted","Data":"7a4fb42a021da4eebb08bbfb9cd921a45a0d14bed0f5e483868bbcd76d062d5b"} Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.349204 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"d3baf082-dd08-4c10-aac9-8ce2874aa2ae","Type":"ContainerStarted","Data":"03ecd079dc354518e6a5ade12a38074302f8295145a3489ce7bcb342225ab499"} Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.366655 4916 scope.go:117] "RemoveContainer" containerID="af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.367057 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.367036769 podStartE2EDuration="2.367036769s" podCreationTimestamp="2025-12-03 19:52:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:52:18.367001368 +0000 UTC m=+1354.329811634" watchObservedRunningTime="2025-12-03 19:52:18.367036769 +0000 UTC m=+1354.329847055" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.371235 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-config-data\") pod \"1ea731d4-086e-4b79-83c4-d7179c941d04\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.371376 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-public-tls-certs\") pod \"1ea731d4-086e-4b79-83c4-d7179c941d04\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.371466 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-internal-tls-certs\") pod \"1ea731d4-086e-4b79-83c4-d7179c941d04\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.371555 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgj7l\" (UniqueName: \"kubernetes.io/projected/1ea731d4-086e-4b79-83c4-d7179c941d04-kube-api-access-fgj7l\") pod \"1ea731d4-086e-4b79-83c4-d7179c941d04\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.371617 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ea731d4-086e-4b79-83c4-d7179c941d04-logs\") pod \"1ea731d4-086e-4b79-83c4-d7179c941d04\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.371665 4916 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-combined-ca-bundle\") pod \"1ea731d4-086e-4b79-83c4-d7179c941d04\" (UID: \"1ea731d4-086e-4b79-83c4-d7179c941d04\") " Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.372545 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ea731d4-086e-4b79-83c4-d7179c941d04-logs" (OuterVolumeSpecName: "logs") pod "1ea731d4-086e-4b79-83c4-d7179c941d04" (UID: "1ea731d4-086e-4b79-83c4-d7179c941d04"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.408974 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.409492 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ea731d4-086e-4b79-83c4-d7179c941d04-kube-api-access-fgj7l" (OuterVolumeSpecName: "kube-api-access-fgj7l") pod "1ea731d4-086e-4b79-83c4-d7179c941d04" (UID: "1ea731d4-086e-4b79-83c4-d7179c941d04"). InnerVolumeSpecName "kube-api-access-fgj7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.413057 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ea731d4-086e-4b79-83c4-d7179c941d04" (UID: "1ea731d4-086e-4b79-83c4-d7179c941d04"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.420375 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.428127 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-config-data" (OuterVolumeSpecName: "config-data") pod "1ea731d4-086e-4b79-83c4-d7179c941d04" (UID: "1ea731d4-086e-4b79-83c4-d7179c941d04"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.428138 4916 scope.go:117] "RemoveContainer" containerID="a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6" Dec 03 19:52:18 crc kubenswrapper[4916]: E1203 19:52:18.428788 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6\": container with ID starting with a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6 not found: ID does not exist" containerID="a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.428835 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6"} err="failed to get container status \"a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6\": rpc error: code = NotFound desc = could not find container \"a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6\": container with ID starting with a09242712b3de44a52e36afd19afff562ae8c2734eb9b2d805311dd35203dfa6 not found: ID does not exist" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.428865 4916 scope.go:117] "RemoveContainer" containerID="af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36" Dec 03 19:52:18 crc kubenswrapper[4916]: E1203 19:52:18.429436 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36\": container with ID starting with af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36 not found: ID does not exist" containerID="af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.429481 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36"} err="failed to get container status \"af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36\": rpc error: code = NotFound desc = could not find container \"af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36\": container with ID starting with af928cdaec97a32bd77a75a85abd13bf4993ed36a0dce5094731567e8c5f6c36 not found: ID does not exist" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.429508 4916 scope.go:117] "RemoveContainer" containerID="12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.430120 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:52:18 crc kubenswrapper[4916]: E1203 19:52:18.430630 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f124358-6ae2-4378-80db-d30606496eca" containerName="nova-scheduler-scheduler" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.430653 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f124358-6ae2-4378-80db-d30606496eca" containerName="nova-scheduler-scheduler" Dec 03 19:52:18 crc kubenswrapper[4916]: E1203 19:52:18.430692 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerName="nova-api-log" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.430703 4916 
state_mem.go:107] "Deleted CPUSet assignment" podUID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerName="nova-api-log" Dec 03 19:52:18 crc kubenswrapper[4916]: E1203 19:52:18.430722 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerName="nova-api-api" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.430730 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerName="nova-api-api" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.431009 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f124358-6ae2-4378-80db-d30606496eca" containerName="nova-scheduler-scheduler" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.431041 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerName="nova-api-log" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.431053 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ea731d4-086e-4b79-83c4-d7179c941d04" containerName="nova-api-api" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.432151 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.434946 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.456659 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.458673 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1ea731d4-086e-4b79-83c4-d7179c941d04" (UID: "1ea731d4-086e-4b79-83c4-d7179c941d04"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.460151 4916 scope.go:117] "RemoveContainer" containerID="12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559" Dec 03 19:52:18 crc kubenswrapper[4916]: E1203 19:52:18.460536 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559\": container with ID starting with 12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559 not found: ID does not exist" containerID="12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.460646 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559"} err="failed to get container status \"12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559\": rpc error: code = NotFound desc = could not find container \"12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559\": container with ID starting with 12d23e5eb081a451b4a27773733ef685cd91591fbcf4f314664d4aac6c7ce559 not found: ID does not exist" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.474895 4916 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.474935 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgj7l\" (UniqueName: \"kubernetes.io/projected/1ea731d4-086e-4b79-83c4-d7179c941d04-kube-api-access-fgj7l\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.474950 4916 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ea731d4-086e-4b79-83c4-d7179c941d04-logs\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.474963 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.474975 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.481848 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1ea731d4-086e-4b79-83c4-d7179c941d04" (UID: "1ea731d4-086e-4b79-83c4-d7179c941d04"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.488770 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f124358-6ae2-4378-80db-d30606496eca" path="/var/lib/kubelet/pods/1f124358-6ae2-4378-80db-d30606496eca/volumes" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.489349 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="721211b9-a2ae-4a64-bc3c-f243277baf71" path="/var/lib/kubelet/pods/721211b9-a2ae-4a64-bc3c-f243277baf71/volumes" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.577137 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/615a55a3-e9f9-4261-96a6-bcf865f0c183-config-data\") pod \"nova-scheduler-0\" (UID: \"615a55a3-e9f9-4261-96a6-bcf865f0c183\") " pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.577468 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm9t9\" (UniqueName: \"kubernetes.io/projected/615a55a3-e9f9-4261-96a6-bcf865f0c183-kube-api-access-wm9t9\") pod \"nova-scheduler-0\" (UID: \"615a55a3-e9f9-4261-96a6-bcf865f0c183\") " pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.577518 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/615a55a3-e9f9-4261-96a6-bcf865f0c183-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"615a55a3-e9f9-4261-96a6-bcf865f0c183\") " pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.577928 4916 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea731d4-086e-4b79-83c4-d7179c941d04-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.666721 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.676811 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.679599 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm9t9\" (UniqueName: \"kubernetes.io/projected/615a55a3-e9f9-4261-96a6-bcf865f0c183-kube-api-access-wm9t9\") pod \"nova-scheduler-0\" (UID: \"615a55a3-e9f9-4261-96a6-bcf865f0c183\") " pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.679650 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/615a55a3-e9f9-4261-96a6-bcf865f0c183-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"615a55a3-e9f9-4261-96a6-bcf865f0c183\") " pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.679808 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/615a55a3-e9f9-4261-96a6-bcf865f0c183-config-data\") pod \"nova-scheduler-0\" (UID: \"615a55a3-e9f9-4261-96a6-bcf865f0c183\") " pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.683593 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/615a55a3-e9f9-4261-96a6-bcf865f0c183-config-data\") pod \"nova-scheduler-0\" (UID: \"615a55a3-e9f9-4261-96a6-bcf865f0c183\") " pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.683989 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/615a55a3-e9f9-4261-96a6-bcf865f0c183-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"615a55a3-e9f9-4261-96a6-bcf865f0c183\") " pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.689256 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.690925 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.696766 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.697206 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.697516 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.707330 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.710178 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm9t9\" (UniqueName: \"kubernetes.io/projected/615a55a3-e9f9-4261-96a6-bcf865f0c183-kube-api-access-wm9t9\") pod \"nova-scheduler-0\" (UID: \"615a55a3-e9f9-4261-96a6-bcf865f0c183\") " pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.756524 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.782279 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.782455 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbzmm\" (UniqueName: \"kubernetes.io/projected/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-kube-api-access-lbzmm\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.782522 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.782623 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-config-data\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.782680 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-public-tls-certs\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.782730 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-logs\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.885028 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbzmm\" (UniqueName: \"kubernetes.io/projected/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-kube-api-access-lbzmm\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.885557 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.885719 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-config-data\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.885769 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-public-tls-certs\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.885855 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-logs\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.885931 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.887939 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-logs\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.891214 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-public-tls-certs\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.891868 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-config-data\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.892428 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.893398 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:18 crc kubenswrapper[4916]: I1203 19:52:18.911285 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbzmm\" (UniqueName: \"kubernetes.io/projected/d9f3c72c-b924-4d5e-8c68-f62d5e83a870-kube-api-access-lbzmm\") pod \"nova-api-0\" (UID: \"d9f3c72c-b924-4d5e-8c68-f62d5e83a870\") " pod="openstack/nova-api-0" Dec 03 19:52:19 crc kubenswrapper[4916]: I1203 19:52:19.011817 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 19:52:19 crc kubenswrapper[4916]: I1203 19:52:19.237743 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 19:52:19 crc kubenswrapper[4916]: I1203 19:52:19.365930 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"615a55a3-e9f9-4261-96a6-bcf865f0c183","Type":"ContainerStarted","Data":"3cd958870151912e73b55c3a34a24aaf9ac1ef2f702c011807ea7a1f2489452d"} Dec 03 19:52:19 crc kubenswrapper[4916]: I1203 19:52:19.518782 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 19:52:20 crc kubenswrapper[4916]: I1203 19:52:20.403634 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d9f3c72c-b924-4d5e-8c68-f62d5e83a870","Type":"ContainerStarted","Data":"4717d4b3e27c7740548eacf8fe81f8d99ea85a842e60c0852a8b19598077ebae"} Dec 03 19:52:20 crc kubenswrapper[4916]: I1203 19:52:20.404034 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d9f3c72c-b924-4d5e-8c68-f62d5e83a870","Type":"ContainerStarted","Data":"b31b98e5ba64f5878bf133b6de877f71b8a2507576e40139df676e83b3e7f74a"} Dec 03 19:52:20 crc kubenswrapper[4916]: I1203 19:52:20.404065 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d9f3c72c-b924-4d5e-8c68-f62d5e83a870","Type":"ContainerStarted","Data":"895825b88ff4ce3a7a8555d0bb88bdd5f7aa00bc19dcb5b20d3088cea44d841e"} Dec 03 19:52:20 crc kubenswrapper[4916]: I1203 19:52:20.405594 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"615a55a3-e9f9-4261-96a6-bcf865f0c183","Type":"ContainerStarted","Data":"d6b8d9ff5c0842450e8220a2e10c156a100db8c5afd9cd0d31276695ccb4725c"} Dec 03 19:52:20 crc kubenswrapper[4916]: I1203 19:52:20.425995 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.425977413 podStartE2EDuration="2.425977413s" podCreationTimestamp="2025-12-03 19:52:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:52:20.424648038 +0000 UTC m=+1356.387458304" watchObservedRunningTime="2025-12-03 19:52:20.425977413 +0000 UTC m=+1356.388787679" Dec 03 19:52:20 crc kubenswrapper[4916]: I1203 19:52:20.447075 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.447058217 podStartE2EDuration="2.447058217s" podCreationTimestamp="2025-12-03 19:52:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:52:20.440924673 +0000 UTC m=+1356.403734949" watchObservedRunningTime="2025-12-03 19:52:20.447058217 +0000 UTC m=+1356.409868483" Dec 03 19:52:20 crc kubenswrapper[4916]: I1203 19:52:20.492867 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ea731d4-086e-4b79-83c4-d7179c941d04" path="/var/lib/kubelet/pods/1ea731d4-086e-4b79-83c4-d7179c941d04/volumes" Dec 03 19:52:22 crc kubenswrapper[4916]: I1203 19:52:22.024788 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 19:52:22 crc kubenswrapper[4916]: I1203 19:52:22.025266 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 19:52:22 crc 
kubenswrapper[4916]: E1203 19:52:22.585628 4916 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode34d9c63_d03a_453c_997e_1e47baa58589.slice/crio-conmon-d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0.scope\": RecentStats: unable to find data in memory cache]" Dec 03 19:52:23 crc kubenswrapper[4916]: I1203 19:52:23.757091 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 19:52:27 crc kubenswrapper[4916]: I1203 19:52:27.022121 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 19:52:27 crc kubenswrapper[4916]: I1203 19:52:27.022489 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 19:52:28 crc kubenswrapper[4916]: I1203 19:52:28.036914 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d3baf082-dd08-4c10-aac9-8ce2874aa2ae" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 19:52:28 crc kubenswrapper[4916]: I1203 19:52:28.036970 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="d3baf082-dd08-4c10-aac9-8ce2874aa2ae" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 19:52:28 crc kubenswrapper[4916]: I1203 19:52:28.758045 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 19:52:28 crc kubenswrapper[4916]: I1203 19:52:28.802966 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 19:52:29 crc kubenswrapper[4916]: I1203 19:52:29.013012 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 19:52:29 crc kubenswrapper[4916]: I1203 19:52:29.013071 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 19:52:29 crc kubenswrapper[4916]: I1203 19:52:29.494829 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 19:52:29 crc kubenswrapper[4916]: I1203 19:52:29.536619 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 19:52:30 crc kubenswrapper[4916]: I1203 19:52:30.026739 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d9f3c72c-b924-4d5e-8c68-f62d5e83a870" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 19:52:30 crc kubenswrapper[4916]: I1203 19:52:30.026759 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d9f3c72c-b924-4d5e-8c68-f62d5e83a870" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 19:52:32 crc kubenswrapper[4916]: E1203 19:52:32.834725 4916 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" 
err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode34d9c63_d03a_453c_997e_1e47baa58589.slice/crio-conmon-d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0.scope\": RecentStats: unable to find data in memory cache]" Dec 03 19:52:33 crc kubenswrapper[4916]: I1203 19:52:33.166082 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 19:52:33 crc kubenswrapper[4916]: I1203 19:52:33.166597 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="bd19dc41-00ef-46d1-ad30-4b9486db33ee" containerName="kube-state-metrics" containerID="cri-o://cdc92955a39188c448b97a8a07238252d9dfc1a3177ffe84ac938662a67ae7be" gracePeriod=30 Dec 03 19:52:33 crc kubenswrapper[4916]: I1203 19:52:33.558508 4916 generic.go:334] "Generic (PLEG): container finished" podID="bd19dc41-00ef-46d1-ad30-4b9486db33ee" containerID="cdc92955a39188c448b97a8a07238252d9dfc1a3177ffe84ac938662a67ae7be" exitCode=2 Dec 03 19:52:33 crc kubenswrapper[4916]: I1203 19:52:33.558742 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bd19dc41-00ef-46d1-ad30-4b9486db33ee","Type":"ContainerDied","Data":"cdc92955a39188c448b97a8a07238252d9dfc1a3177ffe84ac938662a67ae7be"} Dec 03 19:52:33 crc kubenswrapper[4916]: I1203 19:52:33.689177 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 19:52:33 crc kubenswrapper[4916]: I1203 19:52:33.810638 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cg8t2\" (UniqueName: \"kubernetes.io/projected/bd19dc41-00ef-46d1-ad30-4b9486db33ee-kube-api-access-cg8t2\") pod \"bd19dc41-00ef-46d1-ad30-4b9486db33ee\" (UID: \"bd19dc41-00ef-46d1-ad30-4b9486db33ee\") " Dec 03 19:52:33 crc kubenswrapper[4916]: I1203 19:52:33.818183 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd19dc41-00ef-46d1-ad30-4b9486db33ee-kube-api-access-cg8t2" (OuterVolumeSpecName: "kube-api-access-cg8t2") pod "bd19dc41-00ef-46d1-ad30-4b9486db33ee" (UID: "bd19dc41-00ef-46d1-ad30-4b9486db33ee"). InnerVolumeSpecName "kube-api-access-cg8t2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:52:33 crc kubenswrapper[4916]: I1203 19:52:33.912542 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cg8t2\" (UniqueName: \"kubernetes.io/projected/bd19dc41-00ef-46d1-ad30-4b9486db33ee-kube-api-access-cg8t2\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.581352 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"bd19dc41-00ef-46d1-ad30-4b9486db33ee","Type":"ContainerDied","Data":"128c20e6f1b80c185546a7bec9caa838fb0bf250327285fdc66bfe22b7525523"} Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.581715 4916 scope.go:117] "RemoveContainer" containerID="cdc92955a39188c448b97a8a07238252d9dfc1a3177ffe84ac938662a67ae7be" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.581434 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.612036 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.631714 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.648274 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 19:52:34 crc kubenswrapper[4916]: E1203 19:52:34.648848 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd19dc41-00ef-46d1-ad30-4b9486db33ee" containerName="kube-state-metrics" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.648877 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd19dc41-00ef-46d1-ad30-4b9486db33ee" containerName="kube-state-metrics" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.649157 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd19dc41-00ef-46d1-ad30-4b9486db33ee" containerName="kube-state-metrics" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.650043 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.652363 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.652657 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.676438 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.730523 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/9d38924d-74b6-46db-9588-fa5c485fba69-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.730602 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d38924d-74b6-46db-9588-fa5c485fba69-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.730740 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxdmt\" (UniqueName: \"kubernetes.io/projected/9d38924d-74b6-46db-9588-fa5c485fba69-kube-api-access-sxdmt\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.730834 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d38924d-74b6-46db-9588-fa5c485fba69-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.832923 4916 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-sxdmt\" (UniqueName: \"kubernetes.io/projected/9d38924d-74b6-46db-9588-fa5c485fba69-kube-api-access-sxdmt\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.833106 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d38924d-74b6-46db-9588-fa5c485fba69-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.833192 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/9d38924d-74b6-46db-9588-fa5c485fba69-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.833243 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d38924d-74b6-46db-9588-fa5c485fba69-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.839710 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d38924d-74b6-46db-9588-fa5c485fba69-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.841141 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d38924d-74b6-46db-9588-fa5c485fba69-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.845190 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/9d38924d-74b6-46db-9588-fa5c485fba69-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.859818 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxdmt\" (UniqueName: \"kubernetes.io/projected/9d38924d-74b6-46db-9588-fa5c485fba69-kube-api-access-sxdmt\") pod \"kube-state-metrics-0\" (UID: \"9d38924d-74b6-46db-9588-fa5c485fba69\") " pod="openstack/kube-state-metrics-0" Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.872392 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.872753 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="ceilometer-central-agent" containerID="cri-o://539d9d5138bea595449b7c9d9da4ac6dd47681eff3b12ba75a5e6559c484ee46" gracePeriod=30 Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 
19:52:34.872873 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="sg-core" containerID="cri-o://7a17df25de48e60e8e28349bef821cfb6ee717ebac574c9c6f2647879af6b326" gracePeriod=30 Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.872917 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="proxy-httpd" containerID="cri-o://26f91255685944b93a16c424a56b1099e2d8f7f7376b5f056e52388f93f4a2e1" gracePeriod=30 Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.872887 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="ceilometer-notification-agent" containerID="cri-o://5282e42e7fb601656bdd00b5a01d046b835b137c0ced165c5fcda394a26ec979" gracePeriod=30 Dec 03 19:52:34 crc kubenswrapper[4916]: I1203 19:52:34.969980 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 19:52:35 crc kubenswrapper[4916]: I1203 19:52:35.450376 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 19:52:35 crc kubenswrapper[4916]: W1203 19:52:35.455591 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d38924d_74b6_46db_9588_fa5c485fba69.slice/crio-062d2d41b6dc19a8a78db96d6dad0e67921d9420c589daea893dd4677c870823 WatchSource:0}: Error finding container 062d2d41b6dc19a8a78db96d6dad0e67921d9420c589daea893dd4677c870823: Status 404 returned error can't find the container with id 062d2d41b6dc19a8a78db96d6dad0e67921d9420c589daea893dd4677c870823 Dec 03 19:52:35 crc kubenswrapper[4916]: I1203 19:52:35.458251 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 19:52:35 crc kubenswrapper[4916]: I1203 19:52:35.596393 4916 generic.go:334] "Generic (PLEG): container finished" podID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerID="26f91255685944b93a16c424a56b1099e2d8f7f7376b5f056e52388f93f4a2e1" exitCode=0 Dec 03 19:52:35 crc kubenswrapper[4916]: I1203 19:52:35.596426 4916 generic.go:334] "Generic (PLEG): container finished" podID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerID="7a17df25de48e60e8e28349bef821cfb6ee717ebac574c9c6f2647879af6b326" exitCode=2 Dec 03 19:52:35 crc kubenswrapper[4916]: I1203 19:52:35.596434 4916 generic.go:334] "Generic (PLEG): container finished" podID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerID="539d9d5138bea595449b7c9d9da4ac6dd47681eff3b12ba75a5e6559c484ee46" exitCode=0 Dec 03 19:52:35 crc kubenswrapper[4916]: I1203 19:52:35.596473 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4cae38b-15b3-49c5-933f-ef683a1b2d99","Type":"ContainerDied","Data":"26f91255685944b93a16c424a56b1099e2d8f7f7376b5f056e52388f93f4a2e1"} Dec 03 19:52:35 crc kubenswrapper[4916]: I1203 19:52:35.596499 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4cae38b-15b3-49c5-933f-ef683a1b2d99","Type":"ContainerDied","Data":"7a17df25de48e60e8e28349bef821cfb6ee717ebac574c9c6f2647879af6b326"} Dec 03 19:52:35 crc kubenswrapper[4916]: I1203 19:52:35.596508 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"b4cae38b-15b3-49c5-933f-ef683a1b2d99","Type":"ContainerDied","Data":"539d9d5138bea595449b7c9d9da4ac6dd47681eff3b12ba75a5e6559c484ee46"} Dec 03 19:52:35 crc kubenswrapper[4916]: I1203 19:52:35.598017 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9d38924d-74b6-46db-9588-fa5c485fba69","Type":"ContainerStarted","Data":"062d2d41b6dc19a8a78db96d6dad0e67921d9420c589daea893dd4677c870823"} Dec 03 19:52:36 crc kubenswrapper[4916]: I1203 19:52:36.488254 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd19dc41-00ef-46d1-ad30-4b9486db33ee" path="/var/lib/kubelet/pods/bd19dc41-00ef-46d1-ad30-4b9486db33ee/volumes" Dec 03 19:52:36 crc kubenswrapper[4916]: I1203 19:52:36.613162 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9d38924d-74b6-46db-9588-fa5c485fba69","Type":"ContainerStarted","Data":"b9d4c9c1cff2f05b54e9b35741db94311e30b693a1db82c5872629609803d58a"} Dec 03 19:52:36 crc kubenswrapper[4916]: I1203 19:52:36.613362 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 19:52:36 crc kubenswrapper[4916]: I1203 19:52:36.647385 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.271018009 podStartE2EDuration="2.64735924s" podCreationTimestamp="2025-12-03 19:52:34 +0000 UTC" firstStartedPulling="2025-12-03 19:52:35.458037017 +0000 UTC m=+1371.420847283" lastFinishedPulling="2025-12-03 19:52:35.834378248 +0000 UTC m=+1371.797188514" observedRunningTime="2025-12-03 19:52:36.631452073 +0000 UTC m=+1372.594262379" watchObservedRunningTime="2025-12-03 19:52:36.64735924 +0000 UTC m=+1372.610169536" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.029319 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.030110 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.037541 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.628618 4916 generic.go:334] "Generic (PLEG): container finished" podID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerID="5282e42e7fb601656bdd00b5a01d046b835b137c0ced165c5fcda394a26ec979" exitCode=0 Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.628662 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4cae38b-15b3-49c5-933f-ef683a1b2d99","Type":"ContainerDied","Data":"5282e42e7fb601656bdd00b5a01d046b835b137c0ced165c5fcda394a26ec979"} Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.636053 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.761287 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.889138 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-config-data\") pod \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.889203 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-sg-core-conf-yaml\") pod \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.889323 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-combined-ca-bundle\") pod \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.889350 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-log-httpd\") pod \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.889405 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbp6k\" (UniqueName: \"kubernetes.io/projected/b4cae38b-15b3-49c5-933f-ef683a1b2d99-kube-api-access-zbp6k\") pod \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.889453 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-run-httpd\") pod \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.889521 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-scripts\") pod \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\" (UID: \"b4cae38b-15b3-49c5-933f-ef683a1b2d99\") " Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.889845 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b4cae38b-15b3-49c5-933f-ef683a1b2d99" (UID: "b4cae38b-15b3-49c5-933f-ef683a1b2d99"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.889904 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b4cae38b-15b3-49c5-933f-ef683a1b2d99" (UID: "b4cae38b-15b3-49c5-933f-ef683a1b2d99"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.890095 4916 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.890113 4916 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4cae38b-15b3-49c5-933f-ef683a1b2d99-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.914533 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-scripts" (OuterVolumeSpecName: "scripts") pod "b4cae38b-15b3-49c5-933f-ef683a1b2d99" (UID: "b4cae38b-15b3-49c5-933f-ef683a1b2d99"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.914613 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4cae38b-15b3-49c5-933f-ef683a1b2d99-kube-api-access-zbp6k" (OuterVolumeSpecName: "kube-api-access-zbp6k") pod "b4cae38b-15b3-49c5-933f-ef683a1b2d99" (UID: "b4cae38b-15b3-49c5-933f-ef683a1b2d99"). InnerVolumeSpecName "kube-api-access-zbp6k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.928515 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b4cae38b-15b3-49c5-933f-ef683a1b2d99" (UID: "b4cae38b-15b3-49c5-933f-ef683a1b2d99"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.979791 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4cae38b-15b3-49c5-933f-ef683a1b2d99" (UID: "b4cae38b-15b3-49c5-933f-ef683a1b2d99"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.991482 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.991520 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbp6k\" (UniqueName: \"kubernetes.io/projected/b4cae38b-15b3-49c5-933f-ef683a1b2d99-kube-api-access-zbp6k\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.991552 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:37 crc kubenswrapper[4916]: I1203 19:52:37.991580 4916 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.073489 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-config-data" (OuterVolumeSpecName: "config-data") pod "b4cae38b-15b3-49c5-933f-ef683a1b2d99" (UID: "b4cae38b-15b3-49c5-933f-ef683a1b2d99"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.093370 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4cae38b-15b3-49c5-933f-ef683a1b2d99-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.642242 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4cae38b-15b3-49c5-933f-ef683a1b2d99","Type":"ContainerDied","Data":"739330fab195a175ebffe6d9b42bee8df1401fee5556d79fa1256f356af2bc38"} Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.642300 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.642326 4916 scope.go:117] "RemoveContainer" containerID="26f91255685944b93a16c424a56b1099e2d8f7f7376b5f056e52388f93f4a2e1" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.674062 4916 scope.go:117] "RemoveContainer" containerID="7a17df25de48e60e8e28349bef821cfb6ee717ebac574c9c6f2647879af6b326" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.683018 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.715936 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.724346 4916 scope.go:117] "RemoveContainer" containerID="5282e42e7fb601656bdd00b5a01d046b835b137c0ced165c5fcda394a26ec979" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.731128 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:52:38 crc kubenswrapper[4916]: E1203 19:52:38.731656 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="sg-core" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.731680 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="sg-core" Dec 03 19:52:38 crc kubenswrapper[4916]: E1203 19:52:38.731717 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="proxy-httpd" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.731726 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="proxy-httpd" Dec 03 19:52:38 crc kubenswrapper[4916]: E1203 19:52:38.731742 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="ceilometer-notification-agent" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.731750 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="ceilometer-notification-agent" Dec 03 19:52:38 crc kubenswrapper[4916]: E1203 19:52:38.731768 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="ceilometer-central-agent" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.731775 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="ceilometer-central-agent" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.732039 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="sg-core" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.732054 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="proxy-httpd" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.732071 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="ceilometer-central-agent" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.732122 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" containerName="ceilometer-notification-agent" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.735337 4916 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.739102 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.739386 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.739686 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.744240 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.754686 4916 scope.go:117] "RemoveContainer" containerID="539d9d5138bea595449b7c9d9da4ac6dd47681eff3b12ba75a5e6559c484ee46" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.807877 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-scripts\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.808029 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-config-data\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.808072 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-log-httpd\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.808104 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-run-httpd\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.808135 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.808191 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6972\" (UniqueName: \"kubernetes.io/projected/407a5f58-7c0e-43ae-aa16-008635f450ff-kube-api-access-w6972\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.808224 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc 
kubenswrapper[4916]: I1203 19:52:38.808250 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.909838 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-config-data\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.909895 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-log-httpd\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.909926 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-run-httpd\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.909954 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.910005 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6972\" (UniqueName: \"kubernetes.io/projected/407a5f58-7c0e-43ae-aa16-008635f450ff-kube-api-access-w6972\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.910036 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.910059 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.910100 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-scripts\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.911264 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-log-httpd\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 
19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.911447 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-run-httpd\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.915056 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.916907 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-config-data\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.917372 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.924733 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-scripts\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.926101 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:38 crc kubenswrapper[4916]: I1203 19:52:38.932250 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6972\" (UniqueName: \"kubernetes.io/projected/407a5f58-7c0e-43ae-aa16-008635f450ff-kube-api-access-w6972\") pod \"ceilometer-0\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " pod="openstack/ceilometer-0" Dec 03 19:52:39 crc kubenswrapper[4916]: I1203 19:52:39.019916 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 19:52:39 crc kubenswrapper[4916]: I1203 19:52:39.021032 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 19:52:39 crc kubenswrapper[4916]: I1203 19:52:39.021163 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 19:52:39 crc kubenswrapper[4916]: I1203 19:52:39.033940 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 19:52:39 crc kubenswrapper[4916]: I1203 19:52:39.052546 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 19:52:39 crc kubenswrapper[4916]: I1203 19:52:39.584888 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 19:52:39 crc kubenswrapper[4916]: I1203 19:52:39.656729 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"407a5f58-7c0e-43ae-aa16-008635f450ff","Type":"ContainerStarted","Data":"679af06bafa6ffc20c9115a199e152455f7d8f8e8cb678ce10edf3d037a1dbdb"} Dec 03 19:52:39 crc kubenswrapper[4916]: I1203 19:52:39.660846 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 19:52:39 crc kubenswrapper[4916]: I1203 19:52:39.668330 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 19:52:40 crc kubenswrapper[4916]: I1203 19:52:40.487728 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4cae38b-15b3-49c5-933f-ef683a1b2d99" path="/var/lib/kubelet/pods/b4cae38b-15b3-49c5-933f-ef683a1b2d99/volumes" Dec 03 19:52:40 crc kubenswrapper[4916]: I1203 19:52:40.672423 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"407a5f58-7c0e-43ae-aa16-008635f450ff","Type":"ContainerStarted","Data":"01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1"} Dec 03 19:52:41 crc kubenswrapper[4916]: I1203 19:52:41.687383 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"407a5f58-7c0e-43ae-aa16-008635f450ff","Type":"ContainerStarted","Data":"a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d"} Dec 03 19:52:42 crc kubenswrapper[4916]: I1203 19:52:42.696540 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"407a5f58-7c0e-43ae-aa16-008635f450ff","Type":"ContainerStarted","Data":"0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197"} Dec 03 19:52:43 crc kubenswrapper[4916]: E1203 19:52:43.131440 4916 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode34d9c63_d03a_453c_997e_1e47baa58589.slice/crio-conmon-d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0.scope\": RecentStats: unable to find data in memory cache]" Dec 03 19:52:44 crc kubenswrapper[4916]: I1203 19:52:44.987240 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 19:52:47 crc kubenswrapper[4916]: I1203 19:52:47.764098 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"407a5f58-7c0e-43ae-aa16-008635f450ff","Type":"ContainerStarted","Data":"8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153"} Dec 03 19:52:47 crc kubenswrapper[4916]: I1203 19:52:47.775366 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 19:52:47 crc kubenswrapper[4916]: I1203 19:52:47.817716 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.768883876 podStartE2EDuration="9.817690064s" podCreationTimestamp="2025-12-03 19:52:38 +0000 UTC" firstStartedPulling="2025-12-03 19:52:39.595312036 +0000 UTC m=+1375.558122302" lastFinishedPulling="2025-12-03 19:52:46.644118184 +0000 UTC m=+1382.606928490" observedRunningTime="2025-12-03 19:52:47.80971181 +0000 UTC m=+1383.772522086" 
watchObservedRunningTime="2025-12-03 19:52:47.817690064 +0000 UTC m=+1383.780500360" Dec 03 19:53:09 crc kubenswrapper[4916]: I1203 19:53:09.067626 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 19:53:20 crc kubenswrapper[4916]: I1203 19:53:20.349332 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 19:53:21 crc kubenswrapper[4916]: I1203 19:53:21.927540 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 19:53:24 crc kubenswrapper[4916]: I1203 19:53:24.242444 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="6ffb0836-d978-4f53-9a48-1174b647eeaf" containerName="rabbitmq" containerID="cri-o://cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6" gracePeriod=604797 Dec 03 19:53:26 crc kubenswrapper[4916]: I1203 19:53:26.876536 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="13520585-08f1-45f7-b40d-d53b9f047cfd" containerName="rabbitmq" containerID="cri-o://37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a" gracePeriod=604796 Dec 03 19:53:29 crc kubenswrapper[4916]: I1203 19:53:29.206336 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="6ffb0836-d978-4f53-9a48-1174b647eeaf" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused" Dec 03 19:53:29 crc kubenswrapper[4916]: I1203 19:53:29.522487 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="13520585-08f1-45f7-b40d-d53b9f047cfd" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.174696 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.232968 4916 generic.go:334] "Generic (PLEG): container finished" podID="6ffb0836-d978-4f53-9a48-1174b647eeaf" containerID="cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6" exitCode=0 Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.233017 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6ffb0836-d978-4f53-9a48-1174b647eeaf","Type":"ContainerDied","Data":"cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6"} Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.233049 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"6ffb0836-d978-4f53-9a48-1174b647eeaf","Type":"ContainerDied","Data":"ff49beee11b39bed7b082045ed64a6a2ff6f7003edeed115db24730a278f6d62"} Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.233069 4916 scope.go:117] "RemoveContainer" containerID="cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.233232 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.245425 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-erlang-cookie\") pod \"6ffb0836-d978-4f53-9a48-1174b647eeaf\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.245538 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-plugins\") pod \"6ffb0836-d978-4f53-9a48-1174b647eeaf\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.245587 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6ffb0836-d978-4f53-9a48-1174b647eeaf-erlang-cookie-secret\") pod \"6ffb0836-d978-4f53-9a48-1174b647eeaf\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.245622 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-config-data\") pod \"6ffb0836-d978-4f53-9a48-1174b647eeaf\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.245666 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6ffb0836-d978-4f53-9a48-1174b647eeaf-pod-info\") pod \"6ffb0836-d978-4f53-9a48-1174b647eeaf\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.245713 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-server-conf\") pod \"6ffb0836-d978-4f53-9a48-1174b647eeaf\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.245737 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lf2z\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-kube-api-access-7lf2z\") pod \"6ffb0836-d978-4f53-9a48-1174b647eeaf\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.245767 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"6ffb0836-d978-4f53-9a48-1174b647eeaf\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.245812 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-tls\") pod \"6ffb0836-d978-4f53-9a48-1174b647eeaf\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.245850 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-confd\") pod \"6ffb0836-d978-4f53-9a48-1174b647eeaf\" (UID: 
\"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.245865 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-plugins-conf\") pod \"6ffb0836-d978-4f53-9a48-1174b647eeaf\" (UID: \"6ffb0836-d978-4f53-9a48-1174b647eeaf\") " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.247456 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "6ffb0836-d978-4f53-9a48-1174b647eeaf" (UID: "6ffb0836-d978-4f53-9a48-1174b647eeaf"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.248994 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "6ffb0836-d978-4f53-9a48-1174b647eeaf" (UID: "6ffb0836-d978-4f53-9a48-1174b647eeaf"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.250553 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "6ffb0836-d978-4f53-9a48-1174b647eeaf" (UID: "6ffb0836-d978-4f53-9a48-1174b647eeaf"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.252871 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/6ffb0836-d978-4f53-9a48-1174b647eeaf-pod-info" (OuterVolumeSpecName: "pod-info") pod "6ffb0836-d978-4f53-9a48-1174b647eeaf" (UID: "6ffb0836-d978-4f53-9a48-1174b647eeaf"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.256487 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "6ffb0836-d978-4f53-9a48-1174b647eeaf" (UID: "6ffb0836-d978-4f53-9a48-1174b647eeaf"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.266237 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-kube-api-access-7lf2z" (OuterVolumeSpecName: "kube-api-access-7lf2z") pod "6ffb0836-d978-4f53-9a48-1174b647eeaf" (UID: "6ffb0836-d978-4f53-9a48-1174b647eeaf"). InnerVolumeSpecName "kube-api-access-7lf2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.269730 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ffb0836-d978-4f53-9a48-1174b647eeaf-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "6ffb0836-d978-4f53-9a48-1174b647eeaf" (UID: "6ffb0836-d978-4f53-9a48-1174b647eeaf"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.274643 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "persistence") pod "6ffb0836-d978-4f53-9a48-1174b647eeaf" (UID: "6ffb0836-d978-4f53-9a48-1174b647eeaf"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.279749 4916 scope.go:117] "RemoveContainer" containerID="e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.315927 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-config-data" (OuterVolumeSpecName: "config-data") pod "6ffb0836-d978-4f53-9a48-1174b647eeaf" (UID: "6ffb0836-d978-4f53-9a48-1174b647eeaf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.348772 4916 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6ffb0836-d978-4f53-9a48-1174b647eeaf-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.348805 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lf2z\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-kube-api-access-7lf2z\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.348833 4916 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.348842 4916 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.348851 4916 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.348859 4916 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.348867 4916 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.348876 4916 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6ffb0836-d978-4f53-9a48-1174b647eeaf-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.348884 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 
19:53:31.373583 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-server-conf" (OuterVolumeSpecName: "server-conf") pod "6ffb0836-d978-4f53-9a48-1174b647eeaf" (UID: "6ffb0836-d978-4f53-9a48-1174b647eeaf"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.378799 4916 scope.go:117] "RemoveContainer" containerID="cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.379861 4916 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 03 19:53:31 crc kubenswrapper[4916]: E1203 19:53:31.380961 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6\": container with ID starting with cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6 not found: ID does not exist" containerID="cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.380997 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6"} err="failed to get container status \"cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6\": rpc error: code = NotFound desc = could not find container \"cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6\": container with ID starting with cbad67960766d67422a29d502d8d7926484e44e4676c9881c92b83265efbd2c6 not found: ID does not exist" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.381017 4916 scope.go:117] "RemoveContainer" containerID="e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7" Dec 03 19:53:31 crc kubenswrapper[4916]: E1203 19:53:31.381318 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7\": container with ID starting with e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7 not found: ID does not exist" containerID="e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.381335 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7"} err="failed to get container status \"e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7\": rpc error: code = NotFound desc = could not find container \"e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7\": container with ID starting with e005a26ad9e53287dbbead59a10b787e97e37886dda649213af82fd548c023c7 not found: ID does not exist" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.421713 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "6ffb0836-d978-4f53-9a48-1174b647eeaf" (UID: "6ffb0836-d978-4f53-9a48-1174b647eeaf"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.450102 4916 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6ffb0836-d978-4f53-9a48-1174b647eeaf-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.450132 4916 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.450148 4916 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6ffb0836-d978-4f53-9a48-1174b647eeaf-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.568052 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.578482 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.597129 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 19:53:31 crc kubenswrapper[4916]: E1203 19:53:31.597719 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ffb0836-d978-4f53-9a48-1174b647eeaf" containerName="rabbitmq" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.597741 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ffb0836-d978-4f53-9a48-1174b647eeaf" containerName="rabbitmq" Dec 03 19:53:31 crc kubenswrapper[4916]: E1203 19:53:31.597785 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ffb0836-d978-4f53-9a48-1174b647eeaf" containerName="setup-container" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.597794 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ffb0836-d978-4f53-9a48-1174b647eeaf" containerName="setup-container" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.598036 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ffb0836-d978-4f53-9a48-1174b647eeaf" containerName="rabbitmq" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.600227 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.602329 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.602446 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.603280 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.603291 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.603306 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.603728 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-xngf2" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.604939 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.611423 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.754283 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.754321 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.754383 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a80b26ac-d55e-4513-9a8d-a70a0b197433-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.754405 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.754433 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.754537 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/a80b26ac-d55e-4513-9a8d-a70a0b197433-config-data\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.754797 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a80b26ac-d55e-4513-9a8d-a70a0b197433-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.755010 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n25q9\" (UniqueName: \"kubernetes.io/projected/a80b26ac-d55e-4513-9a8d-a70a0b197433-kube-api-access-n25q9\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.755146 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a80b26ac-d55e-4513-9a8d-a70a0b197433-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.755208 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.755328 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a80b26ac-d55e-4513-9a8d-a70a0b197433-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.856785 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a80b26ac-d55e-4513-9a8d-a70a0b197433-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.856842 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.856882 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.856917 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a80b26ac-d55e-4513-9a8d-a70a0b197433-config-data\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " 
pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.856955 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a80b26ac-d55e-4513-9a8d-a70a0b197433-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.857012 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n25q9\" (UniqueName: \"kubernetes.io/projected/a80b26ac-d55e-4513-9a8d-a70a0b197433-kube-api-access-n25q9\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.857057 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a80b26ac-d55e-4513-9a8d-a70a0b197433-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.857088 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.857144 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a80b26ac-d55e-4513-9a8d-a70a0b197433-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.857170 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.857192 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.857431 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.857414 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.857729 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.857972 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a80b26ac-d55e-4513-9a8d-a70a0b197433-config-data\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.858101 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a80b26ac-d55e-4513-9a8d-a70a0b197433-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.860534 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a80b26ac-d55e-4513-9a8d-a70a0b197433-server-conf\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.861311 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.861611 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a80b26ac-d55e-4513-9a8d-a70a0b197433-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.862171 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a80b26ac-d55e-4513-9a8d-a70a0b197433-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.863963 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a80b26ac-d55e-4513-9a8d-a70a0b197433-pod-info\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.881293 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n25q9\" (UniqueName: \"kubernetes.io/projected/a80b26ac-d55e-4513-9a8d-a70a0b197433-kube-api-access-n25q9\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.899084 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"a80b26ac-d55e-4513-9a8d-a70a0b197433\") " pod="openstack/rabbitmq-server-0" Dec 03 19:53:31 crc kubenswrapper[4916]: I1203 19:53:31.920691 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 19:53:32 crc kubenswrapper[4916]: I1203 19:53:32.421576 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 19:53:32 crc kubenswrapper[4916]: W1203 19:53:32.432804 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda80b26ac_d55e_4513_9a8d_a70a0b197433.slice/crio-66bddbe550da1b4ea7ee04b3d1c81ce5369c35d0b6ecce9a265503d34fa39e3f WatchSource:0}: Error finding container 66bddbe550da1b4ea7ee04b3d1c81ce5369c35d0b6ecce9a265503d34fa39e3f: Status 404 returned error can't find the container with id 66bddbe550da1b4ea7ee04b3d1c81ce5369c35d0b6ecce9a265503d34fa39e3f Dec 03 19:53:32 crc kubenswrapper[4916]: I1203 19:53:32.492258 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ffb0836-d978-4f53-9a48-1174b647eeaf" path="/var/lib/kubelet/pods/6ffb0836-d978-4f53-9a48-1174b647eeaf/volumes" Dec 03 19:53:33 crc kubenswrapper[4916]: I1203 19:53:33.257894 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a80b26ac-d55e-4513-9a8d-a70a0b197433","Type":"ContainerStarted","Data":"66bddbe550da1b4ea7ee04b3d1c81ce5369c35d0b6ecce9a265503d34fa39e3f"} Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.125075 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.270018 4916 generic.go:334] "Generic (PLEG): container finished" podID="13520585-08f1-45f7-b40d-d53b9f047cfd" containerID="37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a" exitCode=0 Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.270429 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.271024 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"13520585-08f1-45f7-b40d-d53b9f047cfd","Type":"ContainerDied","Data":"37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a"} Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.271056 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"13520585-08f1-45f7-b40d-d53b9f047cfd","Type":"ContainerDied","Data":"f43c4f8d8a0ee9385c3c8c3b6b0a03f6a9c38db83c860835f0a6751ecebe4191"} Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.271077 4916 scope.go:117] "RemoveContainer" containerID="37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.273439 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a80b26ac-d55e-4513-9a8d-a70a0b197433","Type":"ContainerStarted","Data":"bab289d479886ed878f55e80f08586d05dfb2b487b7baac5dea90ba34114f701"} Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.300513 4916 scope.go:117] "RemoveContainer" containerID="4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.333649 4916 scope.go:117] "RemoveContainer" containerID="37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a" Dec 03 19:53:34 crc kubenswrapper[4916]: E1203 19:53:34.334128 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a\": container with ID starting with 37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a not found: ID does not exist" containerID="37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.334172 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a"} err="failed to get container status \"37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a\": rpc error: code = NotFound desc = could not find container \"37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a\": container with ID starting with 37cadb7ec3153e6b8e3967dd0d364fa68c52e104fbd228544191c22ff379df5a not found: ID does not exist" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.334213 4916 scope.go:117] "RemoveContainer" containerID="4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a" Dec 03 19:53:34 crc kubenswrapper[4916]: E1203 19:53:34.334486 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a\": container with ID starting with 4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a not found: ID does not exist" containerID="4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.334558 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a"} err="failed to get container status 
\"4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a\": rpc error: code = NotFound desc = could not find container \"4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a\": container with ID starting with 4cfe03d78cf295ecc3df560fef8138012abf610d6f76be3806893407a0d2655a not found: ID does not exist" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.335444 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/13520585-08f1-45f7-b40d-d53b9f047cfd-erlang-cookie-secret\") pod \"13520585-08f1-45f7-b40d-d53b9f047cfd\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.335516 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-plugins-conf\") pod \"13520585-08f1-45f7-b40d-d53b9f047cfd\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.336076 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-erlang-cookie\") pod \"13520585-08f1-45f7-b40d-d53b9f047cfd\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.336115 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-tls\") pod \"13520585-08f1-45f7-b40d-d53b9f047cfd\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.336242 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7sjs\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-kube-api-access-j7sjs\") pod \"13520585-08f1-45f7-b40d-d53b9f047cfd\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.336392 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-config-data\") pod \"13520585-08f1-45f7-b40d-d53b9f047cfd\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.336704 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-confd\") pod \"13520585-08f1-45f7-b40d-d53b9f047cfd\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.336900 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/13520585-08f1-45f7-b40d-d53b9f047cfd-pod-info\") pod \"13520585-08f1-45f7-b40d-d53b9f047cfd\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.336932 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"13520585-08f1-45f7-b40d-d53b9f047cfd\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 
19:53:34.337081 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-plugins\") pod \"13520585-08f1-45f7-b40d-d53b9f047cfd\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.337104 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-server-conf\") pod \"13520585-08f1-45f7-b40d-d53b9f047cfd\" (UID: \"13520585-08f1-45f7-b40d-d53b9f047cfd\") " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.338511 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "13520585-08f1-45f7-b40d-d53b9f047cfd" (UID: "13520585-08f1-45f7-b40d-d53b9f047cfd"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.339983 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "13520585-08f1-45f7-b40d-d53b9f047cfd" (UID: "13520585-08f1-45f7-b40d-d53b9f047cfd"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.340129 4916 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.340758 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "13520585-08f1-45f7-b40d-d53b9f047cfd" (UID: "13520585-08f1-45f7-b40d-d53b9f047cfd"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.343648 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/13520585-08f1-45f7-b40d-d53b9f047cfd-pod-info" (OuterVolumeSpecName: "pod-info") pod "13520585-08f1-45f7-b40d-d53b9f047cfd" (UID: "13520585-08f1-45f7-b40d-d53b9f047cfd"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.343913 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "13520585-08f1-45f7-b40d-d53b9f047cfd" (UID: "13520585-08f1-45f7-b40d-d53b9f047cfd"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.346018 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-kube-api-access-j7sjs" (OuterVolumeSpecName: "kube-api-access-j7sjs") pod "13520585-08f1-45f7-b40d-d53b9f047cfd" (UID: "13520585-08f1-45f7-b40d-d53b9f047cfd"). InnerVolumeSpecName "kube-api-access-j7sjs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.347471 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "13520585-08f1-45f7-b40d-d53b9f047cfd" (UID: "13520585-08f1-45f7-b40d-d53b9f047cfd"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.377251 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13520585-08f1-45f7-b40d-d53b9f047cfd-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "13520585-08f1-45f7-b40d-d53b9f047cfd" (UID: "13520585-08f1-45f7-b40d-d53b9f047cfd"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.385236 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-config-data" (OuterVolumeSpecName: "config-data") pod "13520585-08f1-45f7-b40d-d53b9f047cfd" (UID: "13520585-08f1-45f7-b40d-d53b9f047cfd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.415934 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-server-conf" (OuterVolumeSpecName: "server-conf") pod "13520585-08f1-45f7-b40d-d53b9f047cfd" (UID: "13520585-08f1-45f7-b40d-d53b9f047cfd"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.444736 4916 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/13520585-08f1-45f7-b40d-d53b9f047cfd-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.444783 4916 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.444798 4916 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.444814 4916 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.444825 4916 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/13520585-08f1-45f7-b40d-d53b9f047cfd-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.444837 4916 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.444850 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7sjs\" 
(UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-kube-api-access-j7sjs\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.444862 4916 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.444872 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13520585-08f1-45f7-b40d-d53b9f047cfd-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.469441 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "13520585-08f1-45f7-b40d-d53b9f047cfd" (UID: "13520585-08f1-45f7-b40d-d53b9f047cfd"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.470082 4916 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 03 19:53:34 crc kubenswrapper[4916]: E1203 19:53:34.543966 4916 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13520585_08f1_45f7_b40d_d53b9f047cfd.slice/crio-f43c4f8d8a0ee9385c3c8c3b6b0a03f6a9c38db83c860835f0a6751ecebe4191\": RecentStats: unable to find data in memory cache]" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.549396 4916 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/13520585-08f1-45f7-b40d-d53b9f047cfd-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.549608 4916 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.600939 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.610186 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.622039 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 19:53:34 crc kubenswrapper[4916]: E1203 19:53:34.622502 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13520585-08f1-45f7-b40d-d53b9f047cfd" containerName="rabbitmq" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.622523 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="13520585-08f1-45f7-b40d-d53b9f047cfd" containerName="rabbitmq" Dec 03 19:53:34 crc kubenswrapper[4916]: E1203 19:53:34.622545 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13520585-08f1-45f7-b40d-d53b9f047cfd" containerName="setup-container" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.622553 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="13520585-08f1-45f7-b40d-d53b9f047cfd" containerName="setup-container" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 
19:53:34.622806 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="13520585-08f1-45f7-b40d-d53b9f047cfd" containerName="rabbitmq" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.623891 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.627973 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.628239 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.628416 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.628695 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jlv62" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.628861 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.630816 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.632667 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.637504 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.752680 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3d2c7b8-c85e-4806-986b-55b486864e84-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.752760 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.752796 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.752825 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3d2c7b8-c85e-4806-986b-55b486864e84-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.752876 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3d2c7b8-c85e-4806-986b-55b486864e84-config-data\") 
pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.753904 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3d2c7b8-c85e-4806-986b-55b486864e84-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.754105 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.754135 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3d2c7b8-c85e-4806-986b-55b486864e84-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.754272 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.754367 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5h8h9\" (UniqueName: \"kubernetes.io/projected/c3d2c7b8-c85e-4806-986b-55b486864e84-kube-api-access-5h8h9\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.754418 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856360 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856431 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5h8h9\" (UniqueName: \"kubernetes.io/projected/c3d2c7b8-c85e-4806-986b-55b486864e84-kube-api-access-5h8h9\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856466 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-confd\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856507 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3d2c7b8-c85e-4806-986b-55b486864e84-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856539 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856584 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856613 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3d2c7b8-c85e-4806-986b-55b486864e84-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856663 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3d2c7b8-c85e-4806-986b-55b486864e84-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856700 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3d2c7b8-c85e-4806-986b-55b486864e84-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856757 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856783 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3d2c7b8-c85e-4806-986b-55b486864e84-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.856780 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc 
kubenswrapper[4916]: I1203 19:53:34.857201 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.857276 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.857691 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c3d2c7b8-c85e-4806-986b-55b486864e84-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.858003 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c3d2c7b8-c85e-4806-986b-55b486864e84-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.858331 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c3d2c7b8-c85e-4806-986b-55b486864e84-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.862061 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.862501 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c3d2c7b8-c85e-4806-986b-55b486864e84-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.862893 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c3d2c7b8-c85e-4806-986b-55b486864e84-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.863811 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c3d2c7b8-c85e-4806-986b-55b486864e84-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.877776 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5h8h9\" (UniqueName: 
\"kubernetes.io/projected/c3d2c7b8-c85e-4806-986b-55b486864e84-kube-api-access-5h8h9\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.895349 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c3d2c7b8-c85e-4806-986b-55b486864e84\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:34 crc kubenswrapper[4916]: I1203 19:53:34.954407 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:53:35 crc kubenswrapper[4916]: I1203 19:53:35.445928 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.300621 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c3d2c7b8-c85e-4806-986b-55b486864e84","Type":"ContainerStarted","Data":"6d05632a37933083c61d959b925c443efa118fdbf1f5a2183a6c5b4edae0bf4d"} Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.494688 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13520585-08f1-45f7-b40d-d53b9f047cfd" path="/var/lib/kubelet/pods/13520585-08f1-45f7-b40d-d53b9f047cfd/volumes" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.495768 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7d84b4d45c-fsrrt"] Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.498048 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.500078 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.507392 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d84b4d45c-fsrrt"] Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.689935 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-swift-storage-0\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.690032 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-nb\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.690076 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-svc\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.690105 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-openstack-edpm-ipam\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.690136 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-sb\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.690157 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-config\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.690230 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77rz9\" (UniqueName: \"kubernetes.io/projected/8b7b7632-1720-4244-a006-8e30861fcccf-kube-api-access-77rz9\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.791638 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-swift-storage-0\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.791702 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-nb\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.791725 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-svc\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.791743 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-openstack-edpm-ipam\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.791766 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-sb\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.791783 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-config\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.791805 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77rz9\" (UniqueName: \"kubernetes.io/projected/8b7b7632-1720-4244-a006-8e30861fcccf-kube-api-access-77rz9\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.792754 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-swift-storage-0\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.792786 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-sb\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.792918 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-config\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.792935 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-svc\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.793118 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-nb\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.793447 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-openstack-edpm-ipam\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.811797 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77rz9\" (UniqueName: \"kubernetes.io/projected/8b7b7632-1720-4244-a006-8e30861fcccf-kube-api-access-77rz9\") pod \"dnsmasq-dns-7d84b4d45c-fsrrt\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:36 crc kubenswrapper[4916]: I1203 19:53:36.830639 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:37 crc kubenswrapper[4916]: I1203 19:53:37.352934 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d84b4d45c-fsrrt"] Dec 03 19:53:37 crc kubenswrapper[4916]: W1203 19:53:37.357374 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b7b7632_1720_4244_a006_8e30861fcccf.slice/crio-e123e978df8b0e96b3fd04d078aa97b73c62cc7c0e16ed50a00064fd29788a52 WatchSource:0}: Error finding container e123e978df8b0e96b3fd04d078aa97b73c62cc7c0e16ed50a00064fd29788a52: Status 404 returned error can't find the container with id e123e978df8b0e96b3fd04d078aa97b73c62cc7c0e16ed50a00064fd29788a52 Dec 03 19:53:38 crc kubenswrapper[4916]: I1203 19:53:38.334964 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c3d2c7b8-c85e-4806-986b-55b486864e84","Type":"ContainerStarted","Data":"7964e63cbcb97f6308e36357a4ac5e4f34fd7ebbfd31d25274459894ad90e43d"} Dec 03 19:53:38 crc kubenswrapper[4916]: I1203 19:53:38.336896 4916 generic.go:334] "Generic (PLEG): container finished" podID="8b7b7632-1720-4244-a006-8e30861fcccf" containerID="c6393f216a4167fe3cc179eb31fcbad36b7c904c35e0f35200e8e4309263a804" exitCode=0 Dec 03 19:53:38 crc kubenswrapper[4916]: I1203 19:53:38.336944 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" event={"ID":"8b7b7632-1720-4244-a006-8e30861fcccf","Type":"ContainerDied","Data":"c6393f216a4167fe3cc179eb31fcbad36b7c904c35e0f35200e8e4309263a804"} Dec 03 19:53:38 crc kubenswrapper[4916]: I1203 19:53:38.336977 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" event={"ID":"8b7b7632-1720-4244-a006-8e30861fcccf","Type":"ContainerStarted","Data":"e123e978df8b0e96b3fd04d078aa97b73c62cc7c0e16ed50a00064fd29788a52"} Dec 03 19:53:39 crc kubenswrapper[4916]: I1203 19:53:39.352894 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" event={"ID":"8b7b7632-1720-4244-a006-8e30861fcccf","Type":"ContainerStarted","Data":"4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac"} Dec 03 19:53:39 crc kubenswrapper[4916]: I1203 19:53:39.400634 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" podStartSLOduration=3.400610663 podStartE2EDuration="3.400610663s" podCreationTimestamp="2025-12-03 19:53:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:53:39.38930507 +0000 UTC m=+1435.352115426" watchObservedRunningTime="2025-12-03 19:53:39.400610663 +0000 UTC m=+1435.363420969" Dec 03 19:53:40 crc kubenswrapper[4916]: I1203 19:53:40.368122 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:46 crc kubenswrapper[4916]: I1203 19:53:46.159233 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:53:46 crc kubenswrapper[4916]: I1203 19:53:46.159839 4916 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:53:46 crc kubenswrapper[4916]: I1203 19:53:46.833792 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:46 crc kubenswrapper[4916]: I1203 19:53:46.916817 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"] Dec 03 19:53:46 crc kubenswrapper[4916]: I1203 19:53:46.917071 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" podUID="646d792d-4828-48b7-955a-2108efeb5ebc" containerName="dnsmasq-dns" containerID="cri-o://3b5a058e96668f6b872e5a7e9fa56bb9461f0a2729095081a373414b52ab9699" gracePeriod=10 Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.064643 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f6df4f56c-xfpvl"] Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.066766 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.088323 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f6df4f56c-xfpvl"] Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.122592 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-config\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.122663 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.122698 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-dns-svc\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.122772 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-openstack-edpm-ipam\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.122801 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc 
kubenswrapper[4916]: I1203 19:53:47.122862 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7j8qf\" (UniqueName: \"kubernetes.io/projected/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-kube-api-access-7j8qf\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.122978 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.224742 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-openstack-edpm-ipam\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.224796 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.224848 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7j8qf\" (UniqueName: \"kubernetes.io/projected/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-kube-api-access-7j8qf\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.224875 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.224913 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-config\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.224944 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.224974 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-dns-svc\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 
crc kubenswrapper[4916]: I1203 19:53:47.225934 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-dns-svc\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.226458 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.226709 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.227303 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.228668 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-openstack-edpm-ipam\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.236942 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-config\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.267610 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7j8qf\" (UniqueName: \"kubernetes.io/projected/5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62-kube-api-access-7j8qf\") pod \"dnsmasq-dns-6f6df4f56c-xfpvl\" (UID: \"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62\") " pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.433097 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.458063 4916 generic.go:334] "Generic (PLEG): container finished" podID="646d792d-4828-48b7-955a-2108efeb5ebc" containerID="3b5a058e96668f6b872e5a7e9fa56bb9461f0a2729095081a373414b52ab9699" exitCode=0 Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.458101 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" event={"ID":"646d792d-4828-48b7-955a-2108efeb5ebc","Type":"ContainerDied","Data":"3b5a058e96668f6b872e5a7e9fa56bb9461f0a2729095081a373414b52ab9699"} Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.458126 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" event={"ID":"646d792d-4828-48b7-955a-2108efeb5ebc","Type":"ContainerDied","Data":"9ea00c5ead358d3005df525be168ec241c17e02cec34c6d0da6985eb3ef11412"} Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.458142 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ea00c5ead358d3005df525be168ec241c17e02cec34c6d0da6985eb3ef11412" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.496396 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.632783 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-config\") pod \"646d792d-4828-48b7-955a-2108efeb5ebc\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.632831 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-nb\") pod \"646d792d-4828-48b7-955a-2108efeb5ebc\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.632853 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-svc\") pod \"646d792d-4828-48b7-955a-2108efeb5ebc\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.632935 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-sb\") pod \"646d792d-4828-48b7-955a-2108efeb5ebc\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.632961 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-swift-storage-0\") pod \"646d792d-4828-48b7-955a-2108efeb5ebc\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.632985 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgr9d\" (UniqueName: \"kubernetes.io/projected/646d792d-4828-48b7-955a-2108efeb5ebc-kube-api-access-wgr9d\") pod \"646d792d-4828-48b7-955a-2108efeb5ebc\" (UID: \"646d792d-4828-48b7-955a-2108efeb5ebc\") " Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.638020 4916 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/646d792d-4828-48b7-955a-2108efeb5ebc-kube-api-access-wgr9d" (OuterVolumeSpecName: "kube-api-access-wgr9d") pod "646d792d-4828-48b7-955a-2108efeb5ebc" (UID: "646d792d-4828-48b7-955a-2108efeb5ebc"). InnerVolumeSpecName "kube-api-access-wgr9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.691539 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "646d792d-4828-48b7-955a-2108efeb5ebc" (UID: "646d792d-4828-48b7-955a-2108efeb5ebc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.711965 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "646d792d-4828-48b7-955a-2108efeb5ebc" (UID: "646d792d-4828-48b7-955a-2108efeb5ebc"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.712334 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-config" (OuterVolumeSpecName: "config") pod "646d792d-4828-48b7-955a-2108efeb5ebc" (UID: "646d792d-4828-48b7-955a-2108efeb5ebc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.720134 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "646d792d-4828-48b7-955a-2108efeb5ebc" (UID: "646d792d-4828-48b7-955a-2108efeb5ebc"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.730747 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "646d792d-4828-48b7-955a-2108efeb5ebc" (UID: "646d792d-4828-48b7-955a-2108efeb5ebc"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.735681 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.735706 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.735718 4916 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.735748 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgr9d\" (UniqueName: \"kubernetes.io/projected/646d792d-4828-48b7-955a-2108efeb5ebc-kube-api-access-wgr9d\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.735757 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.735766 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/646d792d-4828-48b7-955a-2108efeb5ebc-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:47 crc kubenswrapper[4916]: I1203 19:53:47.907925 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f6df4f56c-xfpvl"] Dec 03 19:53:48 crc kubenswrapper[4916]: I1203 19:53:48.469070 4916 generic.go:334] "Generic (PLEG): container finished" podID="5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62" containerID="d101f3c6dbadebed7b69e7e08997553de797b84152459473adf872dacaf87f1a" exitCode=0 Dec 03 19:53:48 crc kubenswrapper[4916]: I1203 19:53:48.469151 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" event={"ID":"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62","Type":"ContainerDied","Data":"d101f3c6dbadebed7b69e7e08997553de797b84152459473adf872dacaf87f1a"} Dec 03 19:53:48 crc kubenswrapper[4916]: I1203 19:53:48.469176 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:53:48 crc kubenswrapper[4916]: I1203 19:53:48.469192 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" event={"ID":"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62","Type":"ContainerStarted","Data":"891960006d5747a71c5df62aaadb893b091295c7b59fb5c485a5f5e7ce999f95"} Dec 03 19:53:49 crc kubenswrapper[4916]: I1203 19:53:49.481741 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" event={"ID":"5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62","Type":"ContainerStarted","Data":"94eef893478ab02b1b20a2907d115370a507d4c364ce1397a8962c675777f5df"} Dec 03 19:53:49 crc kubenswrapper[4916]: I1203 19:53:49.482298 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:49 crc kubenswrapper[4916]: I1203 19:53:49.508914 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" podStartSLOduration=2.508891952 podStartE2EDuration="2.508891952s" podCreationTimestamp="2025-12-03 19:53:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:53:49.499774057 +0000 UTC m=+1445.462584323" watchObservedRunningTime="2025-12-03 19:53:49.508891952 +0000 UTC m=+1445.471702218" Dec 03 19:53:57 crc kubenswrapper[4916]: I1203 19:53:57.435845 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6f6df4f56c-xfpvl" Dec 03 19:53:57 crc kubenswrapper[4916]: I1203 19:53:57.539002 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d84b4d45c-fsrrt"] Dec 03 19:53:57 crc kubenswrapper[4916]: I1203 19:53:57.539285 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" podUID="8b7b7632-1720-4244-a006-8e30861fcccf" containerName="dnsmasq-dns" containerID="cri-o://4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac" gracePeriod=10 Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.019847 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.083654 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-swift-storage-0\") pod \"8b7b7632-1720-4244-a006-8e30861fcccf\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.084094 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-sb\") pod \"8b7b7632-1720-4244-a006-8e30861fcccf\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.084143 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-svc\") pod \"8b7b7632-1720-4244-a006-8e30861fcccf\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.084267 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-config\") pod \"8b7b7632-1720-4244-a006-8e30861fcccf\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.084324 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-openstack-edpm-ipam\") pod \"8b7b7632-1720-4244-a006-8e30861fcccf\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.084378 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77rz9\" (UniqueName: \"kubernetes.io/projected/8b7b7632-1720-4244-a006-8e30861fcccf-kube-api-access-77rz9\") pod \"8b7b7632-1720-4244-a006-8e30861fcccf\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.084414 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-nb\") pod \"8b7b7632-1720-4244-a006-8e30861fcccf\" (UID: \"8b7b7632-1720-4244-a006-8e30861fcccf\") " Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.101394 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b7b7632-1720-4244-a006-8e30861fcccf-kube-api-access-77rz9" (OuterVolumeSpecName: "kube-api-access-77rz9") pod "8b7b7632-1720-4244-a006-8e30861fcccf" (UID: "8b7b7632-1720-4244-a006-8e30861fcccf"). InnerVolumeSpecName "kube-api-access-77rz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.138641 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8b7b7632-1720-4244-a006-8e30861fcccf" (UID: "8b7b7632-1720-4244-a006-8e30861fcccf"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.141189 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8b7b7632-1720-4244-a006-8e30861fcccf" (UID: "8b7b7632-1720-4244-a006-8e30861fcccf"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.148016 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8b7b7632-1720-4244-a006-8e30861fcccf" (UID: "8b7b7632-1720-4244-a006-8e30861fcccf"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.148589 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8b7b7632-1720-4244-a006-8e30861fcccf" (UID: "8b7b7632-1720-4244-a006-8e30861fcccf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.152502 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "8b7b7632-1720-4244-a006-8e30861fcccf" (UID: "8b7b7632-1720-4244-a006-8e30861fcccf"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.155933 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-config" (OuterVolumeSpecName: "config") pod "8b7b7632-1720-4244-a006-8e30861fcccf" (UID: "8b7b7632-1720-4244-a006-8e30861fcccf"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.187696 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-config\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.187729 4916 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.187743 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77rz9\" (UniqueName: \"kubernetes.io/projected/8b7b7632-1720-4244-a006-8e30861fcccf-kube-api-access-77rz9\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.187755 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.187766 4916 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.187777 4916 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.187788 4916 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b7b7632-1720-4244-a006-8e30861fcccf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.584066 4916 generic.go:334] "Generic (PLEG): container finished" podID="8b7b7632-1720-4244-a006-8e30861fcccf" containerID="4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac" exitCode=0 Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.584110 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.584114 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" event={"ID":"8b7b7632-1720-4244-a006-8e30861fcccf","Type":"ContainerDied","Data":"4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac"} Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.584149 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d84b4d45c-fsrrt" event={"ID":"8b7b7632-1720-4244-a006-8e30861fcccf","Type":"ContainerDied","Data":"e123e978df8b0e96b3fd04d078aa97b73c62cc7c0e16ed50a00064fd29788a52"} Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.584170 4916 scope.go:117] "RemoveContainer" containerID="4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.612728 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d84b4d45c-fsrrt"] Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.616259 4916 scope.go:117] "RemoveContainer" containerID="c6393f216a4167fe3cc179eb31fcbad36b7c904c35e0f35200e8e4309263a804" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.621500 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7d84b4d45c-fsrrt"] Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.641849 4916 scope.go:117] "RemoveContainer" containerID="4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac" Dec 03 19:53:58 crc kubenswrapper[4916]: E1203 19:53:58.642253 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac\": container with ID starting with 4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac not found: ID does not exist" containerID="4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.642384 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac"} err="failed to get container status \"4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac\": rpc error: code = NotFound desc = could not find container \"4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac\": container with ID starting with 4b791ffeb7efe9256af7ceb3a0f8ad4c4544d117c9fdbb701560c247e4d730ac not found: ID does not exist" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.642415 4916 scope.go:117] "RemoveContainer" containerID="c6393f216a4167fe3cc179eb31fcbad36b7c904c35e0f35200e8e4309263a804" Dec 03 19:53:58 crc kubenswrapper[4916]: E1203 19:53:58.643063 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6393f216a4167fe3cc179eb31fcbad36b7c904c35e0f35200e8e4309263a804\": container with ID starting with c6393f216a4167fe3cc179eb31fcbad36b7c904c35e0f35200e8e4309263a804 not found: ID does not exist" containerID="c6393f216a4167fe3cc179eb31fcbad36b7c904c35e0f35200e8e4309263a804" Dec 03 19:53:58 crc kubenswrapper[4916]: I1203 19:53:58.643105 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6393f216a4167fe3cc179eb31fcbad36b7c904c35e0f35200e8e4309263a804"} err="failed to get container status 
\"c6393f216a4167fe3cc179eb31fcbad36b7c904c35e0f35200e8e4309263a804\": rpc error: code = NotFound desc = could not find container \"c6393f216a4167fe3cc179eb31fcbad36b7c904c35e0f35200e8e4309263a804\": container with ID starting with c6393f216a4167fe3cc179eb31fcbad36b7c904c35e0f35200e8e4309263a804 not found: ID does not exist" Dec 03 19:54:00 crc kubenswrapper[4916]: I1203 19:54:00.488342 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b7b7632-1720-4244-a006-8e30861fcccf" path="/var/lib/kubelet/pods/8b7b7632-1720-4244-a006-8e30861fcccf/volumes" Dec 03 19:54:06 crc kubenswrapper[4916]: I1203 19:54:06.679524 4916 generic.go:334] "Generic (PLEG): container finished" podID="a80b26ac-d55e-4513-9a8d-a70a0b197433" containerID="bab289d479886ed878f55e80f08586d05dfb2b487b7baac5dea90ba34114f701" exitCode=0 Dec 03 19:54:06 crc kubenswrapper[4916]: I1203 19:54:06.679678 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a80b26ac-d55e-4513-9a8d-a70a0b197433","Type":"ContainerDied","Data":"bab289d479886ed878f55e80f08586d05dfb2b487b7baac5dea90ba34114f701"} Dec 03 19:54:07 crc kubenswrapper[4916]: I1203 19:54:07.694617 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"a80b26ac-d55e-4513-9a8d-a70a0b197433","Type":"ContainerStarted","Data":"b121f6efcbcc717c0a665d8a1e86be389b47780c1b34b2cbaa801b2a1a7b59e6"} Dec 03 19:54:07 crc kubenswrapper[4916]: I1203 19:54:07.695446 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 19:54:07 crc kubenswrapper[4916]: I1203 19:54:07.733257 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.733234244 podStartE2EDuration="36.733234244s" podCreationTimestamp="2025-12-03 19:53:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:54:07.718299223 +0000 UTC m=+1463.681109509" watchObservedRunningTime="2025-12-03 19:54:07.733234244 +0000 UTC m=+1463.696044510" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.721624 4916 generic.go:334] "Generic (PLEG): container finished" podID="c3d2c7b8-c85e-4806-986b-55b486864e84" containerID="7964e63cbcb97f6308e36357a4ac5e4f34fd7ebbfd31d25274459894ad90e43d" exitCode=0 Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.721715 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c3d2c7b8-c85e-4806-986b-55b486864e84","Type":"ContainerDied","Data":"7964e63cbcb97f6308e36357a4ac5e4f34fd7ebbfd31d25274459894ad90e43d"} Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.778068 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd"] Dec 03 19:54:10 crc kubenswrapper[4916]: E1203 19:54:10.778459 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b7b7632-1720-4244-a006-8e30861fcccf" containerName="init" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.778477 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b7b7632-1720-4244-a006-8e30861fcccf" containerName="init" Dec 03 19:54:10 crc kubenswrapper[4916]: E1203 19:54:10.778494 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="646d792d-4828-48b7-955a-2108efeb5ebc" containerName="dnsmasq-dns" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.778500 
4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="646d792d-4828-48b7-955a-2108efeb5ebc" containerName="dnsmasq-dns" Dec 03 19:54:10 crc kubenswrapper[4916]: E1203 19:54:10.778521 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="646d792d-4828-48b7-955a-2108efeb5ebc" containerName="init" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.778526 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="646d792d-4828-48b7-955a-2108efeb5ebc" containerName="init" Dec 03 19:54:10 crc kubenswrapper[4916]: E1203 19:54:10.778539 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b7b7632-1720-4244-a006-8e30861fcccf" containerName="dnsmasq-dns" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.778545 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b7b7632-1720-4244-a006-8e30861fcccf" containerName="dnsmasq-dns" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.778738 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b7b7632-1720-4244-a006-8e30861fcccf" containerName="dnsmasq-dns" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.778755 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="646d792d-4828-48b7-955a-2108efeb5ebc" containerName="dnsmasq-dns" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.779433 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.788912 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.789179 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.790286 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.791890 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.820790 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd"] Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.834424 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xpnl\" (UniqueName: \"kubernetes.io/projected/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-kube-api-access-4xpnl\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.834506 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.834611 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.834659 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.937614 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xpnl\" (UniqueName: \"kubernetes.io/projected/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-kube-api-access-4xpnl\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.937678 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.937754 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.937790 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.942820 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.943796 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.944612 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:10 crc kubenswrapper[4916]: I1203 19:54:10.953970 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xpnl\" (UniqueName: \"kubernetes.io/projected/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-kube-api-access-4xpnl\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:11 crc kubenswrapper[4916]: I1203 19:54:11.234749 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:12 crc kubenswrapper[4916]: I1203 19:54:11.737866 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"c3d2c7b8-c85e-4806-986b-55b486864e84","Type":"ContainerStarted","Data":"d0aaae4a5ebacaaeaeb140e6c5dcec978d46a5742836739de38fa089cd297cf0"} Dec 03 19:54:12 crc kubenswrapper[4916]: I1203 19:54:11.738695 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:54:12 crc kubenswrapper[4916]: I1203 19:54:11.762994 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.762975207 podStartE2EDuration="37.762975207s" podCreationTimestamp="2025-12-03 19:53:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 19:54:11.755985589 +0000 UTC m=+1467.718795855" watchObservedRunningTime="2025-12-03 19:54:11.762975207 +0000 UTC m=+1467.725785473" Dec 03 19:54:12 crc kubenswrapper[4916]: I1203 19:54:12.471020 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd"] Dec 03 19:54:12 crc kubenswrapper[4916]: W1203 19:54:12.476943 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2eaefa8_6147_45c5_ae3e_77e0d47c2d11.slice/crio-5217b59be9aac8450a9bd2e00b3fe5b09882052f5435fb8af3069de33ca16762 WatchSource:0}: Error finding container 5217b59be9aac8450a9bd2e00b3fe5b09882052f5435fb8af3069de33ca16762: Status 404 returned error can't find the container with id 5217b59be9aac8450a9bd2e00b3fe5b09882052f5435fb8af3069de33ca16762 Dec 03 19:54:12 crc kubenswrapper[4916]: I1203 19:54:12.751057 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" event={"ID":"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11","Type":"ContainerStarted","Data":"5217b59be9aac8450a9bd2e00b3fe5b09882052f5435fb8af3069de33ca16762"} Dec 03 19:54:16 crc kubenswrapper[4916]: I1203 19:54:16.159551 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 19:54:16 crc kubenswrapper[4916]: I1203 19:54:16.159944 4916 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.108608 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-grzmd"] Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.110922 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.117151 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-grzmd"] Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.196411 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6dzn\" (UniqueName: \"kubernetes.io/projected/eb345a70-1de4-46fd-bfbb-452026cde957-kube-api-access-n6dzn\") pod \"certified-operators-grzmd\" (UID: \"eb345a70-1de4-46fd-bfbb-452026cde957\") " pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.196480 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-catalog-content\") pod \"certified-operators-grzmd\" (UID: \"eb345a70-1de4-46fd-bfbb-452026cde957\") " pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.196945 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-utilities\") pod \"certified-operators-grzmd\" (UID: \"eb345a70-1de4-46fd-bfbb-452026cde957\") " pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.299031 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-utilities\") pod \"certified-operators-grzmd\" (UID: \"eb345a70-1de4-46fd-bfbb-452026cde957\") " pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.299127 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6dzn\" (UniqueName: \"kubernetes.io/projected/eb345a70-1de4-46fd-bfbb-452026cde957-kube-api-access-n6dzn\") pod \"certified-operators-grzmd\" (UID: \"eb345a70-1de4-46fd-bfbb-452026cde957\") " pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.299192 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-catalog-content\") pod \"certified-operators-grzmd\" (UID: \"eb345a70-1de4-46fd-bfbb-452026cde957\") " pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.299591 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-utilities\") pod \"certified-operators-grzmd\" (UID: 
\"eb345a70-1de4-46fd-bfbb-452026cde957\") " pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.299714 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-catalog-content\") pod \"certified-operators-grzmd\" (UID: \"eb345a70-1de4-46fd-bfbb-452026cde957\") " pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.317857 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6dzn\" (UniqueName: \"kubernetes.io/projected/eb345a70-1de4-46fd-bfbb-452026cde957-kube-api-access-n6dzn\") pod \"certified-operators-grzmd\" (UID: \"eb345a70-1de4-46fd-bfbb-452026cde957\") " pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:17 crc kubenswrapper[4916]: I1203 19:54:17.440185 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:18 crc kubenswrapper[4916]: I1203 19:54:18.655734 4916 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod646d792d-4828-48b7-955a-2108efeb5ebc"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod646d792d-4828-48b7-955a-2108efeb5ebc] : Timed out while waiting for systemd to remove kubepods-besteffort-pod646d792d_4828_48b7_955a_2108efeb5ebc.slice" Dec 03 19:54:18 crc kubenswrapper[4916]: E1203 19:54:18.655809 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod646d792d-4828-48b7-955a-2108efeb5ebc] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod646d792d-4828-48b7-955a-2108efeb5ebc] : Timed out while waiting for systemd to remove kubepods-besteffort-pod646d792d_4828_48b7_955a_2108efeb5ebc.slice" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" podUID="646d792d-4828-48b7-955a-2108efeb5ebc" Dec 03 19:54:18 crc kubenswrapper[4916]: I1203 19:54:18.811646 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7bbf7cf9-2drw5" Dec 03 19:54:18 crc kubenswrapper[4916]: I1203 19:54:18.850735 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"] Dec 03 19:54:18 crc kubenswrapper[4916]: I1203 19:54:18.861012 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b7bbf7cf9-2drw5"] Dec 03 19:54:20 crc kubenswrapper[4916]: I1203 19:54:20.496229 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="646d792d-4828-48b7-955a-2108efeb5ebc" path="/var/lib/kubelet/pods/646d792d-4828-48b7-955a-2108efeb5ebc/volumes" Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.696351 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-v75z5"] Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.724004 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.755305 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-v75z5"] Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.782923 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-utilities\") pod \"redhat-marketplace-v75z5\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.783297 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t97sn\" (UniqueName: \"kubernetes.io/projected/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-kube-api-access-t97sn\") pod \"redhat-marketplace-v75z5\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.783408 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-catalog-content\") pod \"redhat-marketplace-v75z5\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.886682 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t97sn\" (UniqueName: \"kubernetes.io/projected/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-kube-api-access-t97sn\") pod \"redhat-marketplace-v75z5\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.887226 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-catalog-content\") pod \"redhat-marketplace-v75z5\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.887797 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-catalog-content\") pod \"redhat-marketplace-v75z5\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.888061 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-utilities\") pod \"redhat-marketplace-v75z5\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.888398 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-utilities\") pod \"redhat-marketplace-v75z5\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.906888 4916 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-t97sn\" (UniqueName: \"kubernetes.io/projected/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-kube-api-access-t97sn\") pod \"redhat-marketplace-v75z5\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:21 crc kubenswrapper[4916]: I1203 19:54:21.925787 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 19:54:22 crc kubenswrapper[4916]: I1203 19:54:22.072906 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:24 crc kubenswrapper[4916]: I1203 19:54:24.080913 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-grzmd"] Dec 03 19:54:24 crc kubenswrapper[4916]: W1203 19:54:24.089776 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb345a70_1de4_46fd_bfbb_452026cde957.slice/crio-2a6d61bc25dc13b0a45af2f8cf8aed0e981020c6a8fe8244eea39a8f46cadb56 WatchSource:0}: Error finding container 2a6d61bc25dc13b0a45af2f8cf8aed0e981020c6a8fe8244eea39a8f46cadb56: Status 404 returned error can't find the container with id 2a6d61bc25dc13b0a45af2f8cf8aed0e981020c6a8fe8244eea39a8f46cadb56 Dec 03 19:54:24 crc kubenswrapper[4916]: I1203 19:54:24.091874 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-v75z5"] Dec 03 19:54:24 crc kubenswrapper[4916]: W1203 19:54:24.095704 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb30ceb3a_19d5_477a_8abb_e8241ea2e35b.slice/crio-0442b9ff4e3c045e2c880443b4405a3234fab112ef8234fb099285da4c876b71 WatchSource:0}: Error finding container 0442b9ff4e3c045e2c880443b4405a3234fab112ef8234fb099285da4c876b71: Status 404 returned error can't find the container with id 0442b9ff4e3c045e2c880443b4405a3234fab112ef8234fb099285da4c876b71 Dec 03 19:54:24 crc kubenswrapper[4916]: I1203 19:54:24.873038 4916 generic.go:334] "Generic (PLEG): container finished" podID="eb345a70-1de4-46fd-bfbb-452026cde957" containerID="afa372d97a1b4e1143bde660a2a8ca9abf6e88f814efca70f1301f70be31d195" exitCode=0 Dec 03 19:54:24 crc kubenswrapper[4916]: I1203 19:54:24.873207 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzmd" event={"ID":"eb345a70-1de4-46fd-bfbb-452026cde957","Type":"ContainerDied","Data":"afa372d97a1b4e1143bde660a2a8ca9abf6e88f814efca70f1301f70be31d195"} Dec 03 19:54:24 crc kubenswrapper[4916]: I1203 19:54:24.873260 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzmd" event={"ID":"eb345a70-1de4-46fd-bfbb-452026cde957","Type":"ContainerStarted","Data":"2a6d61bc25dc13b0a45af2f8cf8aed0e981020c6a8fe8244eea39a8f46cadb56"} Dec 03 19:54:24 crc kubenswrapper[4916]: I1203 19:54:24.877472 4916 generic.go:334] "Generic (PLEG): container finished" podID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" containerID="5ed1cf6dd0c29eea645797878d4f43563a3038b70f3730bd5386c8d916b109cd" exitCode=0 Dec 03 19:54:24 crc kubenswrapper[4916]: I1203 19:54:24.877604 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v75z5" event={"ID":"b30ceb3a-19d5-477a-8abb-e8241ea2e35b","Type":"ContainerDied","Data":"5ed1cf6dd0c29eea645797878d4f43563a3038b70f3730bd5386c8d916b109cd"} Dec 03 
19:54:24 crc kubenswrapper[4916]: I1203 19:54:24.877646 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v75z5" event={"ID":"b30ceb3a-19d5-477a-8abb-e8241ea2e35b","Type":"ContainerStarted","Data":"0442b9ff4e3c045e2c880443b4405a3234fab112ef8234fb099285da4c876b71"} Dec 03 19:54:24 crc kubenswrapper[4916]: I1203 19:54:24.882974 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" event={"ID":"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11","Type":"ContainerStarted","Data":"32f72546e673eef5377d37223c2dbdec50614ca97cd6d7d84b338bb099ad0ab6"} Dec 03 19:54:24 crc kubenswrapper[4916]: I1203 19:54:24.944205 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" podStartSLOduration=3.776766999 podStartE2EDuration="14.944174375s" podCreationTimestamp="2025-12-03 19:54:10 +0000 UTC" firstStartedPulling="2025-12-03 19:54:12.478950665 +0000 UTC m=+1468.441760931" lastFinishedPulling="2025-12-03 19:54:23.646358041 +0000 UTC m=+1479.609168307" observedRunningTime="2025-12-03 19:54:24.914044386 +0000 UTC m=+1480.876854672" watchObservedRunningTime="2025-12-03 19:54:24.944174375 +0000 UTC m=+1480.906984651" Dec 03 19:54:24 crc kubenswrapper[4916]: I1203 19:54:24.957839 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 19:54:25 crc kubenswrapper[4916]: I1203 19:54:25.898405 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzmd" event={"ID":"eb345a70-1de4-46fd-bfbb-452026cde957","Type":"ContainerStarted","Data":"3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195"} Dec 03 19:54:26 crc kubenswrapper[4916]: I1203 19:54:26.908936 4916 generic.go:334] "Generic (PLEG): container finished" podID="eb345a70-1de4-46fd-bfbb-452026cde957" containerID="3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195" exitCode=0 Dec 03 19:54:26 crc kubenswrapper[4916]: I1203 19:54:26.909003 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzmd" event={"ID":"eb345a70-1de4-46fd-bfbb-452026cde957","Type":"ContainerDied","Data":"3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195"} Dec 03 19:54:26 crc kubenswrapper[4916]: I1203 19:54:26.911369 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v75z5" event={"ID":"b30ceb3a-19d5-477a-8abb-e8241ea2e35b","Type":"ContainerStarted","Data":"473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402"} Dec 03 19:54:27 crc kubenswrapper[4916]: I1203 19:54:27.925243 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzmd" event={"ID":"eb345a70-1de4-46fd-bfbb-452026cde957","Type":"ContainerStarted","Data":"2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f"} Dec 03 19:54:27 crc kubenswrapper[4916]: I1203 19:54:27.927992 4916 generic.go:334] "Generic (PLEG): container finished" podID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" containerID="473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402" exitCode=0 Dec 03 19:54:27 crc kubenswrapper[4916]: I1203 19:54:27.928055 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v75z5" 
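event={"ID":"b30ceb3a-19d5-477a-8abb-e8241ea2e35b","Type":"ContainerDied","Data":"473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402"}

The pod_startup_latency_tracker entry above for repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling). Re-deriving the logged numbers; the marketplace pods' tracker entries that follow fit the same formula:

    // Reproduces the tracker's arithmetic from the timestamps logged above.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        created := time.Date(2025, 12, 3, 19, 54, 10, 0, time.UTC)
        firstPull := time.Date(2025, 12, 3, 19, 54, 12, 478950665, time.UTC)
        lastPull := time.Date(2025, 12, 3, 19, 54, 23, 646358041, time.UTC)
        running := time.Date(2025, 12, 3, 19, 54, 24, 944174375, time.UTC)

        e2e := running.Sub(created)          // 14.944174375s = podStartE2EDuration
        slo := e2e - lastPull.Sub(firstPull) // 3.776766999s  = podStartSLOduration
        fmt.Println(e2e, slo)
    }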
event={"ID":"b30ceb3a-19d5-477a-8abb-e8241ea2e35b","Type":"ContainerDied","Data":"473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402"} Dec 03 19:54:27 crc kubenswrapper[4916]: I1203 19:54:27.954144 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-grzmd" podStartSLOduration=8.247022764 podStartE2EDuration="10.954128126s" podCreationTimestamp="2025-12-03 19:54:17 +0000 UTC" firstStartedPulling="2025-12-03 19:54:24.87580624 +0000 UTC m=+1480.838616556" lastFinishedPulling="2025-12-03 19:54:27.582911652 +0000 UTC m=+1483.545721918" observedRunningTime="2025-12-03 19:54:27.950506919 +0000 UTC m=+1483.913317185" watchObservedRunningTime="2025-12-03 19:54:27.954128126 +0000 UTC m=+1483.916938392" Dec 03 19:54:28 crc kubenswrapper[4916]: I1203 19:54:28.939165 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v75z5" event={"ID":"b30ceb3a-19d5-477a-8abb-e8241ea2e35b","Type":"ContainerStarted","Data":"fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2"} Dec 03 19:54:28 crc kubenswrapper[4916]: I1203 19:54:28.965110 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-v75z5" podStartSLOduration=4.477717975 podStartE2EDuration="7.965089371s" podCreationTimestamp="2025-12-03 19:54:21 +0000 UTC" firstStartedPulling="2025-12-03 19:54:24.880783943 +0000 UTC m=+1480.843594259" lastFinishedPulling="2025-12-03 19:54:28.368155389 +0000 UTC m=+1484.330965655" observedRunningTime="2025-12-03 19:54:28.960103927 +0000 UTC m=+1484.922914213" watchObservedRunningTime="2025-12-03 19:54:28.965089371 +0000 UTC m=+1484.927899637" Dec 03 19:54:32 crc kubenswrapper[4916]: I1203 19:54:32.073996 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:32 crc kubenswrapper[4916]: I1203 19:54:32.074055 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:32 crc kubenswrapper[4916]: I1203 19:54:32.132618 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:34 crc kubenswrapper[4916]: I1203 19:54:34.108365 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:34 crc kubenswrapper[4916]: I1203 19:54:34.187773 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-v75z5"] Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.018931 4916 generic.go:334] "Generic (PLEG): container finished" podID="d2eaefa8-6147-45c5-ae3e-77e0d47c2d11" containerID="32f72546e673eef5377d37223c2dbdec50614ca97cd6d7d84b338bb099ad0ab6" exitCode=0 Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.019012 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" event={"ID":"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11","Type":"ContainerDied","Data":"32f72546e673eef5377d37223c2dbdec50614ca97cd6d7d84b338bb099ad0ab6"} Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.019175 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-v75z5" podUID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" containerName="registry-server" 
containerID="cri-o://fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2" gracePeriod=2 Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.521870 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.596856 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t97sn\" (UniqueName: \"kubernetes.io/projected/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-kube-api-access-t97sn\") pod \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.596987 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-utilities\") pod \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.597022 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-catalog-content\") pod \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\" (UID: \"b30ceb3a-19d5-477a-8abb-e8241ea2e35b\") " Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.597934 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-utilities" (OuterVolumeSpecName: "utilities") pod "b30ceb3a-19d5-477a-8abb-e8241ea2e35b" (UID: "b30ceb3a-19d5-477a-8abb-e8241ea2e35b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.602632 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-kube-api-access-t97sn" (OuterVolumeSpecName: "kube-api-access-t97sn") pod "b30ceb3a-19d5-477a-8abb-e8241ea2e35b" (UID: "b30ceb3a-19d5-477a-8abb-e8241ea2e35b"). InnerVolumeSpecName "kube-api-access-t97sn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.626444 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b30ceb3a-19d5-477a-8abb-e8241ea2e35b" (UID: "b30ceb3a-19d5-477a-8abb-e8241ea2e35b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.699391 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t97sn\" (UniqueName: \"kubernetes.io/projected/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-kube-api-access-t97sn\") on node \"crc\" DevicePath \"\"" Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.699450 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:54:36 crc kubenswrapper[4916]: I1203 19:54:36.699463 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b30ceb3a-19d5-477a-8abb-e8241ea2e35b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.035766 4916 generic.go:334] "Generic (PLEG): container finished" podID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" containerID="fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2" exitCode=0 Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.035850 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v75z5" event={"ID":"b30ceb3a-19d5-477a-8abb-e8241ea2e35b","Type":"ContainerDied","Data":"fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2"} Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.036276 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v75z5" event={"ID":"b30ceb3a-19d5-477a-8abb-e8241ea2e35b","Type":"ContainerDied","Data":"0442b9ff4e3c045e2c880443b4405a3234fab112ef8234fb099285da4c876b71"} Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.035859 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v75z5" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.036326 4916 scope.go:117] "RemoveContainer" containerID="fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.092854 4916 scope.go:117] "RemoveContainer" containerID="473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.104549 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-v75z5"] Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.118236 4916 scope.go:117] "RemoveContainer" containerID="5ed1cf6dd0c29eea645797878d4f43563a3038b70f3730bd5386c8d916b109cd" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.118717 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-v75z5"] Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.180845 4916 scope.go:117] "RemoveContainer" containerID="fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2" Dec 03 19:54:37 crc kubenswrapper[4916]: E1203 19:54:37.181418 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2\": container with ID starting with fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2 not found: ID does not exist" containerID="fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.181493 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2"} err="failed to get container status \"fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2\": rpc error: code = NotFound desc = could not find container \"fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2\": container with ID starting with fdbf013998b05636d9ecf810d9550ea79d6ad89d118d676e291a6662b876c0e2 not found: ID does not exist" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.181524 4916 scope.go:117] "RemoveContainer" containerID="473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402" Dec 03 19:54:37 crc kubenswrapper[4916]: E1203 19:54:37.181959 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402\": container with ID starting with 473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402 not found: ID does not exist" containerID="473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.182029 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402"} err="failed to get container status \"473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402\": rpc error: code = NotFound desc = could not find container \"473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402\": container with ID starting with 473a9e4ff9f90f03e31a8565326cb9ec464e6d41816ca99db9a937fe47a16402 not found: ID does not exist" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.182067 4916 scope.go:117] "RemoveContainer" 
containerID="5ed1cf6dd0c29eea645797878d4f43563a3038b70f3730bd5386c8d916b109cd" Dec 03 19:54:37 crc kubenswrapper[4916]: E1203 19:54:37.182451 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ed1cf6dd0c29eea645797878d4f43563a3038b70f3730bd5386c8d916b109cd\": container with ID starting with 5ed1cf6dd0c29eea645797878d4f43563a3038b70f3730bd5386c8d916b109cd not found: ID does not exist" containerID="5ed1cf6dd0c29eea645797878d4f43563a3038b70f3730bd5386c8d916b109cd" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.182511 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ed1cf6dd0c29eea645797878d4f43563a3038b70f3730bd5386c8d916b109cd"} err="failed to get container status \"5ed1cf6dd0c29eea645797878d4f43563a3038b70f3730bd5386c8d916b109cd\": rpc error: code = NotFound desc = could not find container \"5ed1cf6dd0c29eea645797878d4f43563a3038b70f3730bd5386c8d916b109cd\": container with ID starting with 5ed1cf6dd0c29eea645797878d4f43563a3038b70f3730bd5386c8d916b109cd not found: ID does not exist" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.440744 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.440789 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.494287 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.504557 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.514771 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-repo-setup-combined-ca-bundle\") pod \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.514833 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-ssh-key\") pod \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.514972 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-inventory\") pod \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.515016 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xpnl\" (UniqueName: \"kubernetes.io/projected/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-kube-api-access-4xpnl\") pod \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\" (UID: \"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11\") " Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.521288 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-kube-api-access-4xpnl" (OuterVolumeSpecName: "kube-api-access-4xpnl") pod "d2eaefa8-6147-45c5-ae3e-77e0d47c2d11" (UID: "d2eaefa8-6147-45c5-ae3e-77e0d47c2d11"). InnerVolumeSpecName "kube-api-access-4xpnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.521878 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "d2eaefa8-6147-45c5-ae3e-77e0d47c2d11" (UID: "d2eaefa8-6147-45c5-ae3e-77e0d47c2d11"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.556590 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d2eaefa8-6147-45c5-ae3e-77e0d47c2d11" (UID: "d2eaefa8-6147-45c5-ae3e-77e0d47c2d11"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.559961 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-inventory" (OuterVolumeSpecName: "inventory") pod "d2eaefa8-6147-45c5-ae3e-77e0d47c2d11" (UID: "d2eaefa8-6147-45c5-ae3e-77e0d47c2d11"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.618505 4916 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.618542 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.618551 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 19:54:37 crc kubenswrapper[4916]: I1203 19:54:37.618563 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xpnl\" (UniqueName: \"kubernetes.io/projected/d2eaefa8-6147-45c5-ae3e-77e0d47c2d11-kube-api-access-4xpnl\") on node \"crc\" DevicePath \"\"" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.050174 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" event={"ID":"d2eaefa8-6147-45c5-ae3e-77e0d47c2d11","Type":"ContainerDied","Data":"5217b59be9aac8450a9bd2e00b3fe5b09882052f5435fb8af3069de33ca16762"} Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.050215 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5217b59be9aac8450a9bd2e00b3fe5b09882052f5435fb8af3069de33ca16762" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.050213 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.158287 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.201573 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x"] Dec 03 19:54:38 crc kubenswrapper[4916]: E1203 19:54:38.202075 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" containerName="extract-utilities" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.202093 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" containerName="extract-utilities" Dec 03 19:54:38 crc kubenswrapper[4916]: E1203 19:54:38.202120 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2eaefa8-6147-45c5-ae3e-77e0d47c2d11" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.202130 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2eaefa8-6147-45c5-ae3e-77e0d47c2d11" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 19:54:38 crc kubenswrapper[4916]: E1203 19:54:38.202151 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" containerName="registry-server" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.202158 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" containerName="registry-server" Dec 03 19:54:38 crc kubenswrapper[4916]: E1203 19:54:38.202186 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" containerName="extract-content" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.202193 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" containerName="extract-content" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.202415 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" containerName="registry-server" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.202442 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2eaefa8-6147-45c5-ae3e-77e0d47c2d11" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.203264 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.206959 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.207386 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.207527 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.207703 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.232044 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-bbf7x\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.232286 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-bbf7x\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.232496 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxrhr\" (UniqueName: \"kubernetes.io/projected/af00bdd2-2610-40a8-b6d7-1252796d9341-kube-api-access-rxrhr\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-bbf7x\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.243951 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x"] Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.334848 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-bbf7x\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.334928 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-bbf7x\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.335000 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxrhr\" (UniqueName: \"kubernetes.io/projected/af00bdd2-2610-40a8-b6d7-1252796d9341-kube-api-access-rxrhr\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-bbf7x\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.341013 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-bbf7x\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.341746 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-bbf7x\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.356908 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxrhr\" (UniqueName: \"kubernetes.io/projected/af00bdd2-2610-40a8-b6d7-1252796d9341-kube-api-access-rxrhr\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-bbf7x\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.491646 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b30ceb3a-19d5-477a-8abb-e8241ea2e35b" path="/var/lib/kubelet/pods/b30ceb3a-19d5-477a-8abb-e8241ea2e35b/volumes" Dec 03 19:54:38 crc kubenswrapper[4916]: I1203 19:54:38.521381 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:39 crc kubenswrapper[4916]: I1203 19:54:39.058395 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x"] Dec 03 19:54:39 crc kubenswrapper[4916]: I1203 19:54:39.455008 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-grzmd"] Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.074357 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" event={"ID":"af00bdd2-2610-40a8-b6d7-1252796d9341","Type":"ContainerStarted","Data":"5e8e7d4635d66c896ecc9b1fc6f79b101a9afdbcf5e362c1a5dcf0139bcb791a"} Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.074631 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" event={"ID":"af00bdd2-2610-40a8-b6d7-1252796d9341","Type":"ContainerStarted","Data":"f047f02b70efd4e57efc7da4c67e6a3fb9bc19a74a1a88c53a1bea24f6cdafcf"} Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.074398 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-grzmd" podUID="eb345a70-1de4-46fd-bfbb-452026cde957" containerName="registry-server" containerID="cri-o://2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f" gracePeriod=2 Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.097782 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" podStartSLOduration=1.635470577 podStartE2EDuration="2.097764025s" podCreationTimestamp="2025-12-03 19:54:38 +0000 UTC" firstStartedPulling="2025-12-03 19:54:39.067351648 +0000 UTC 
m=+1495.030161954" lastFinishedPulling="2025-12-03 19:54:39.529645126 +0000 UTC m=+1495.492455402" observedRunningTime="2025-12-03 19:54:40.092902444 +0000 UTC m=+1496.055712720" watchObservedRunningTime="2025-12-03 19:54:40.097764025 +0000 UTC m=+1496.060574311" Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.611789 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.685275 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6dzn\" (UniqueName: \"kubernetes.io/projected/eb345a70-1de4-46fd-bfbb-452026cde957-kube-api-access-n6dzn\") pod \"eb345a70-1de4-46fd-bfbb-452026cde957\" (UID: \"eb345a70-1de4-46fd-bfbb-452026cde957\") " Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.685335 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-utilities\") pod \"eb345a70-1de4-46fd-bfbb-452026cde957\" (UID: \"eb345a70-1de4-46fd-bfbb-452026cde957\") " Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.685585 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-catalog-content\") pod \"eb345a70-1de4-46fd-bfbb-452026cde957\" (UID: \"eb345a70-1de4-46fd-bfbb-452026cde957\") " Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.689650 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-utilities" (OuterVolumeSpecName: "utilities") pod "eb345a70-1de4-46fd-bfbb-452026cde957" (UID: "eb345a70-1de4-46fd-bfbb-452026cde957"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.711324 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb345a70-1de4-46fd-bfbb-452026cde957-kube-api-access-n6dzn" (OuterVolumeSpecName: "kube-api-access-n6dzn") pod "eb345a70-1de4-46fd-bfbb-452026cde957" (UID: "eb345a70-1de4-46fd-bfbb-452026cde957"). InnerVolumeSpecName "kube-api-access-n6dzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.739668 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb345a70-1de4-46fd-bfbb-452026cde957" (UID: "eb345a70-1de4-46fd-bfbb-452026cde957"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.788436 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.788475 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6dzn\" (UniqueName: \"kubernetes.io/projected/eb345a70-1de4-46fd-bfbb-452026cde957-kube-api-access-n6dzn\") on node \"crc\" DevicePath \"\"" Dec 03 19:54:40 crc kubenswrapper[4916]: I1203 19:54:40.788486 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb345a70-1de4-46fd-bfbb-452026cde957-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.089247 4916 generic.go:334] "Generic (PLEG): container finished" podID="eb345a70-1de4-46fd-bfbb-452026cde957" containerID="2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f" exitCode=0 Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.089287 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzmd" event={"ID":"eb345a70-1de4-46fd-bfbb-452026cde957","Type":"ContainerDied","Data":"2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f"} Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.089352 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-grzmd" event={"ID":"eb345a70-1de4-46fd-bfbb-452026cde957","Type":"ContainerDied","Data":"2a6d61bc25dc13b0a45af2f8cf8aed0e981020c6a8fe8244eea39a8f46cadb56"} Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.089346 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-grzmd" Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.089369 4916 scope.go:117] "RemoveContainer" containerID="2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f" Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.115147 4916 scope.go:117] "RemoveContainer" containerID="3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195" Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.134346 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-grzmd"] Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.156256 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-grzmd"] Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.160588 4916 scope.go:117] "RemoveContainer" containerID="afa372d97a1b4e1143bde660a2a8ca9abf6e88f814efca70f1301f70be31d195" Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.184929 4916 scope.go:117] "RemoveContainer" containerID="2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f" Dec 03 19:54:41 crc kubenswrapper[4916]: E1203 19:54:41.185405 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f\": container with ID starting with 2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f not found: ID does not exist" containerID="2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f" Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.185526 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f"} err="failed to get container status \"2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f\": rpc error: code = NotFound desc = could not find container \"2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f\": container with ID starting with 2af0fb762b3f7a689d1cf8421f4da138c97d09d94da0ee772c3e311a3b45d01f not found: ID does not exist" Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.185641 4916 scope.go:117] "RemoveContainer" containerID="3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195" Dec 03 19:54:41 crc kubenswrapper[4916]: E1203 19:54:41.186067 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195\": container with ID starting with 3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195 not found: ID does not exist" containerID="3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195" Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.186097 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195"} err="failed to get container status \"3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195\": rpc error: code = NotFound desc = could not find container \"3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195\": container with ID starting with 3c199ac9fb5e5fb375cdd7933960a7e15b24ec2bce8a113abba54886b6cb7195 not found: ID does not exist" Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.186118 4916 scope.go:117] "RemoveContainer" 
containerID="afa372d97a1b4e1143bde660a2a8ca9abf6e88f814efca70f1301f70be31d195" Dec 03 19:54:41 crc kubenswrapper[4916]: E1203 19:54:41.186314 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afa372d97a1b4e1143bde660a2a8ca9abf6e88f814efca70f1301f70be31d195\": container with ID starting with afa372d97a1b4e1143bde660a2a8ca9abf6e88f814efca70f1301f70be31d195 not found: ID does not exist" containerID="afa372d97a1b4e1143bde660a2a8ca9abf6e88f814efca70f1301f70be31d195" Dec 03 19:54:41 crc kubenswrapper[4916]: I1203 19:54:41.186339 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afa372d97a1b4e1143bde660a2a8ca9abf6e88f814efca70f1301f70be31d195"} err="failed to get container status \"afa372d97a1b4e1143bde660a2a8ca9abf6e88f814efca70f1301f70be31d195\": rpc error: code = NotFound desc = could not find container \"afa372d97a1b4e1143bde660a2a8ca9abf6e88f814efca70f1301f70be31d195\": container with ID starting with afa372d97a1b4e1143bde660a2a8ca9abf6e88f814efca70f1301f70be31d195 not found: ID does not exist" Dec 03 19:54:42 crc kubenswrapper[4916]: I1203 19:54:42.494940 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb345a70-1de4-46fd-bfbb-452026cde957" path="/var/lib/kubelet/pods/eb345a70-1de4-46fd-bfbb-452026cde957/volumes" Dec 03 19:54:43 crc kubenswrapper[4916]: I1203 19:54:43.125447 4916 generic.go:334] "Generic (PLEG): container finished" podID="af00bdd2-2610-40a8-b6d7-1252796d9341" containerID="5e8e7d4635d66c896ecc9b1fc6f79b101a9afdbcf5e362c1a5dcf0139bcb791a" exitCode=0 Dec 03 19:54:43 crc kubenswrapper[4916]: I1203 19:54:43.125493 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" event={"ID":"af00bdd2-2610-40a8-b6d7-1252796d9341","Type":"ContainerDied","Data":"5e8e7d4635d66c896ecc9b1fc6f79b101a9afdbcf5e362c1a5dcf0139bcb791a"} Dec 03 19:54:44 crc kubenswrapper[4916]: I1203 19:54:44.622235 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" Dec 03 19:54:44 crc kubenswrapper[4916]: I1203 19:54:44.765963 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxrhr\" (UniqueName: \"kubernetes.io/projected/af00bdd2-2610-40a8-b6d7-1252796d9341-kube-api-access-rxrhr\") pod \"af00bdd2-2610-40a8-b6d7-1252796d9341\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " Dec 03 19:54:44 crc kubenswrapper[4916]: I1203 19:54:44.766022 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-inventory\") pod \"af00bdd2-2610-40a8-b6d7-1252796d9341\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " Dec 03 19:54:44 crc kubenswrapper[4916]: I1203 19:54:44.766125 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-ssh-key\") pod \"af00bdd2-2610-40a8-b6d7-1252796d9341\" (UID: \"af00bdd2-2610-40a8-b6d7-1252796d9341\") " Dec 03 19:54:44 crc kubenswrapper[4916]: I1203 19:54:44.771833 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af00bdd2-2610-40a8-b6d7-1252796d9341-kube-api-access-rxrhr" (OuterVolumeSpecName: "kube-api-access-rxrhr") pod "af00bdd2-2610-40a8-b6d7-1252796d9341" (UID: "af00bdd2-2610-40a8-b6d7-1252796d9341"). InnerVolumeSpecName "kube-api-access-rxrhr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 19:54:44 crc kubenswrapper[4916]: I1203 19:54:44.793517 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "af00bdd2-2610-40a8-b6d7-1252796d9341" (UID: "af00bdd2-2610-40a8-b6d7-1252796d9341"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 19:54:44 crc kubenswrapper[4916]: I1203 19:54:44.799986 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-inventory" (OuterVolumeSpecName: "inventory") pod "af00bdd2-2610-40a8-b6d7-1252796d9341" (UID: "af00bdd2-2610-40a8-b6d7-1252796d9341"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:54:44 crc kubenswrapper[4916]: I1203 19:54:44.868581 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxrhr\" (UniqueName: \"kubernetes.io/projected/af00bdd2-2610-40a8-b6d7-1252796d9341-kube-api-access-rxrhr\") on node \"crc\" DevicePath \"\""
Dec 03 19:54:44 crc kubenswrapper[4916]: I1203 19:54:44.868610 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-inventory\") on node \"crc\" DevicePath \"\""
Dec 03 19:54:44 crc kubenswrapper[4916]: I1203 19:54:44.868620 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af00bdd2-2610-40a8-b6d7-1252796d9341-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.149406 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x" event={"ID":"af00bdd2-2610-40a8-b6d7-1252796d9341","Type":"ContainerDied","Data":"f047f02b70efd4e57efc7da4c67e6a3fb9bc19a74a1a88c53a1bea24f6cdafcf"}
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.149457 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f047f02b70efd4e57efc7da4c67e6a3fb9bc19a74a1a88c53a1bea24f6cdafcf"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.149478 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-bbf7x"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.226623 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"]
Dec 03 19:54:45 crc kubenswrapper[4916]: E1203 19:54:45.227326 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb345a70-1de4-46fd-bfbb-452026cde957" containerName="extract-utilities"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.227346 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb345a70-1de4-46fd-bfbb-452026cde957" containerName="extract-utilities"
Dec 03 19:54:45 crc kubenswrapper[4916]: E1203 19:54:45.227356 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb345a70-1de4-46fd-bfbb-452026cde957" containerName="extract-content"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.227363 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb345a70-1de4-46fd-bfbb-452026cde957" containerName="extract-content"
Dec 03 19:54:45 crc kubenswrapper[4916]: E1203 19:54:45.227375 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb345a70-1de4-46fd-bfbb-452026cde957" containerName="registry-server"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.227384 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb345a70-1de4-46fd-bfbb-452026cde957" containerName="registry-server"
Dec 03 19:54:45 crc kubenswrapper[4916]: E1203 19:54:45.227399 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af00bdd2-2610-40a8-b6d7-1252796d9341" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.227408 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="af00bdd2-2610-40a8-b6d7-1252796d9341" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.228068 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb345a70-1de4-46fd-bfbb-452026cde957" containerName="registry-server"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.228098 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="af00bdd2-2610-40a8-b6d7-1252796d9341" containerName="redhat-edpm-deployment-openstack-edpm-ipam"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.228835 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.233095 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.233806 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.234035 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.234802 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.247768 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"]
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.279273 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.279352 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.279479 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.279522 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv7kf\" (UniqueName: \"kubernetes.io/projected/93e63900-68b9-4c76-b614-78dcd0862645-kube-api-access-sv7kf\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.381594 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.381658 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.381708 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.381736 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv7kf\" (UniqueName: \"kubernetes.io/projected/93e63900-68b9-4c76-b614-78dcd0862645-kube-api-access-sv7kf\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.385724 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.388516 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.389672 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.397309 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv7kf\" (UniqueName: \"kubernetes.io/projected/93e63900-68b9-4c76-b614-78dcd0862645-kube-api-access-sv7kf\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.588768 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:54:45 crc kubenswrapper[4916]: I1203 19:54:45.945473 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"]
Dec 03 19:54:46 crc kubenswrapper[4916]: I1203 19:54:46.158977 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 19:54:46 crc kubenswrapper[4916]: I1203 19:54:46.159097 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 19:54:46 crc kubenswrapper[4916]: I1203 19:54:46.159169 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms"
Dec 03 19:54:46 crc kubenswrapper[4916]: I1203 19:54:46.160541 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 19:54:46 crc kubenswrapper[4916]: I1203 19:54:46.160718 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72" gracePeriod=600
Dec 03 19:54:46 crc kubenswrapper[4916]: I1203 19:54:46.167978 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp" event={"ID":"93e63900-68b9-4c76-b614-78dcd0862645","Type":"ContainerStarted","Data":"48d4644c8fc0c03e555e11b2ac3c922dfff3976251b66017c844498379f0b579"}
Dec 03 19:54:46 crc kubenswrapper[4916]: E1203 19:54:46.315622 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:54:47 crc kubenswrapper[4916]: I1203 19:54:47.181284 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72" exitCode=0
Dec 03 19:54:47 crc kubenswrapper[4916]: I1203 19:54:47.181681 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"}
Dec 03 19:54:47 crc kubenswrapper[4916]: I1203 19:54:47.181728 4916 scope.go:117] "RemoveContainer" containerID="f766f08ec381c0d446f946242779f93ec8affbc91dd83bc4db900247c021dcf7"
Dec 03 19:54:47 crc kubenswrapper[4916]: I1203 19:54:47.182632 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:54:47 crc kubenswrapper[4916]: E1203 19:54:47.183077 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:54:47 crc kubenswrapper[4916]: I1203 19:54:47.186737 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp" event={"ID":"93e63900-68b9-4c76-b614-78dcd0862645","Type":"ContainerStarted","Data":"5b17ccc513b7f1fb4594de39c47b9d64b44fdf2a195740f3f8c3a8791fa45c41"}
Dec 03 19:54:47 crc kubenswrapper[4916]: I1203 19:54:47.232358 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp" podStartSLOduration=1.555338054 podStartE2EDuration="2.232326215s" podCreationTimestamp="2025-12-03 19:54:45 +0000 UTC" firstStartedPulling="2025-12-03 19:54:45.953544011 +0000 UTC m=+1501.916354277" lastFinishedPulling="2025-12-03 19:54:46.630532172 +0000 UTC m=+1502.593342438" observedRunningTime="2025-12-03 19:54:47.231256836 +0000 UTC m=+1503.194067102" watchObservedRunningTime="2025-12-03 19:54:47.232326215 +0000 UTC m=+1503.195136521"
Dec 03 19:54:50 crc kubenswrapper[4916]: I1203 19:54:50.737673 4916 scope.go:117] "RemoveContainer" containerID="2f8be4f8f2bdc2311e9669b998c36553b0f5983183ef8919b1928aad6c131ad0"
Dec 03 19:54:50 crc kubenswrapper[4916]: I1203 19:54:50.785785 4916 scope.go:117] "RemoveContainer" containerID="59c8b75f8b7f7e8931893628d57d1aacb5dc1567ff29fba6d520b87c2e53a0de"
Dec 03 19:54:58 crc kubenswrapper[4916]: I1203 19:54:58.478129 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:54:58 crc kubenswrapper[4916]: E1203 19:54:58.478979 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:55:11 crc kubenswrapper[4916]: I1203 19:55:11.478597 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:55:11 crc kubenswrapper[4916]: E1203 19:55:11.479606 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:55:23 crc kubenswrapper[4916]: I1203 19:55:23.478751 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:55:23 crc kubenswrapper[4916]: E1203 19:55:23.479658 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:55:34 crc kubenswrapper[4916]: I1203 19:55:34.486120 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:55:34 crc kubenswrapper[4916]: E1203 19:55:34.487022 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:55:45 crc kubenswrapper[4916]: I1203 19:55:45.478237 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:55:45 crc kubenswrapper[4916]: E1203 19:55:45.479211 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:55:50 crc kubenswrapper[4916]: I1203 19:55:50.949161 4916 scope.go:117] "RemoveContainer" containerID="47dd291dcb1707e985089dd6fe1ea10400031631de2e1f3c975f74b6ddf2d368"
Dec 03 19:56:00 crc kubenswrapper[4916]: I1203 19:56:00.479950 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:56:00 crc kubenswrapper[4916]: E1203 19:56:00.481609 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:56:13 crc kubenswrapper[4916]: I1203 19:56:13.477799 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:56:13 crc kubenswrapper[4916]: E1203 19:56:13.478944 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:56:25 crc kubenswrapper[4916]: I1203 19:56:25.477922 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:56:25 crc kubenswrapper[4916]: E1203 19:56:25.478634 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:56:39 crc kubenswrapper[4916]: I1203 19:56:39.477901 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:56:39 crc kubenswrapper[4916]: E1203 19:56:39.478826 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:56:51 crc kubenswrapper[4916]: I1203 19:56:51.016931 4916 scope.go:117] "RemoveContainer" containerID="2d767894017ff04106242318b89c67b03fa0c0acbae08bda92f15c1f11d2b095"
Dec 03 19:56:51 crc kubenswrapper[4916]: I1203 19:56:51.060555 4916 scope.go:117] "RemoveContainer" containerID="fe07e7f9b2713593652e751d1206e7db8b7d636fe7f6965bd9c7283f887dfe60"
Dec 03 19:56:51 crc kubenswrapper[4916]: I1203 19:56:51.091706 4916 scope.go:117] "RemoveContainer" containerID="083e30ff46487b6262a4d0df4d8eacd55634dbc1c6efd709527b80a28d12cfd4"
Dec 03 19:56:51 crc kubenswrapper[4916]: I1203 19:56:51.126799 4916 scope.go:117] "RemoveContainer" containerID="6fa84222aaee90ea25b64f983d6c42f09e370c17498163441e22ab995d3f3d7a"
Dec 03 19:56:51 crc kubenswrapper[4916]: I1203 19:56:51.480274 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:56:51 crc kubenswrapper[4916]: E1203 19:56:51.480765 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:57:05 crc kubenswrapper[4916]: I1203 19:57:05.477941 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:57:05 crc kubenswrapper[4916]: E1203 19:57:05.478887 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:57:19 crc kubenswrapper[4916]: I1203 19:57:19.478178 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:57:19 crc kubenswrapper[4916]: E1203 19:57:19.479070 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:57:34 crc kubenswrapper[4916]: I1203 19:57:34.484150 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:57:34 crc kubenswrapper[4916]: E1203 19:57:34.485466 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:57:48 crc kubenswrapper[4916]: I1203 19:57:48.479648 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:57:48 crc kubenswrapper[4916]: E1203 19:57:48.480387 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:57:51 crc kubenswrapper[4916]: I1203 19:57:51.206208 4916 scope.go:117] "RemoveContainer" containerID="67b923533375b27d732eb895cf7ae25a63a19e77d8647c0ccc2294ad06bc967f"
Dec 03 19:57:51 crc kubenswrapper[4916]: I1203 19:57:51.246079 4916 scope.go:117] "RemoveContainer" containerID="d20cafaed936f2a4f29dd87af7e224d6e0e8d9a898db3dfc5a4a64b750bef6b0"
Dec 03 19:58:00 crc kubenswrapper[4916]: I1203 19:58:00.479457 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:58:00 crc kubenswrapper[4916]: E1203 19:58:00.480795 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:58:04 crc kubenswrapper[4916]: I1203 19:58:04.724384 4916 generic.go:334] "Generic (PLEG): container finished" podID="93e63900-68b9-4c76-b614-78dcd0862645" containerID="5b17ccc513b7f1fb4594de39c47b9d64b44fdf2a195740f3f8c3a8791fa45c41" exitCode=0
Dec 03 19:58:04 crc kubenswrapper[4916]: I1203 19:58:04.724447 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp" event={"ID":"93e63900-68b9-4c76-b614-78dcd0862645","Type":"ContainerDied","Data":"5b17ccc513b7f1fb4594de39c47b9d64b44fdf2a195740f3f8c3a8791fa45c41"}
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.224629 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.328445 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-bootstrap-combined-ca-bundle\") pod \"93e63900-68b9-4c76-b614-78dcd0862645\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") "
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.329746 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-ssh-key\") pod \"93e63900-68b9-4c76-b614-78dcd0862645\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") "
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.329833 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-inventory\") pod \"93e63900-68b9-4c76-b614-78dcd0862645\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") "
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.329863 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sv7kf\" (UniqueName: \"kubernetes.io/projected/93e63900-68b9-4c76-b614-78dcd0862645-kube-api-access-sv7kf\") pod \"93e63900-68b9-4c76-b614-78dcd0862645\" (UID: \"93e63900-68b9-4c76-b614-78dcd0862645\") "
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.336388 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93e63900-68b9-4c76-b614-78dcd0862645-kube-api-access-sv7kf" (OuterVolumeSpecName: "kube-api-access-sv7kf") pod "93e63900-68b9-4c76-b614-78dcd0862645" (UID: "93e63900-68b9-4c76-b614-78dcd0862645"). InnerVolumeSpecName "kube-api-access-sv7kf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.337718 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "93e63900-68b9-4c76-b614-78dcd0862645" (UID: "93e63900-68b9-4c76-b614-78dcd0862645"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.359893 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "93e63900-68b9-4c76-b614-78dcd0862645" (UID: "93e63900-68b9-4c76-b614-78dcd0862645"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.381243 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-inventory" (OuterVolumeSpecName: "inventory") pod "93e63900-68b9-4c76-b614-78dcd0862645" (UID: "93e63900-68b9-4c76-b614-78dcd0862645"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.432856 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.432900 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-inventory\") on node \"crc\" DevicePath \"\""
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.432915 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sv7kf\" (UniqueName: \"kubernetes.io/projected/93e63900-68b9-4c76-b614-78dcd0862645-kube-api-access-sv7kf\") on node \"crc\" DevicePath \"\""
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.432929 4916 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93e63900-68b9-4c76-b614-78dcd0862645-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.751736 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp" event={"ID":"93e63900-68b9-4c76-b614-78dcd0862645","Type":"ContainerDied","Data":"48d4644c8fc0c03e555e11b2ac3c922dfff3976251b66017c844498379f0b579"}
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.751809 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48d4644c8fc0c03e555e11b2ac3c922dfff3976251b66017c844498379f0b579"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.751904 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.845446 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"]
Dec 03 19:58:06 crc kubenswrapper[4916]: E1203 19:58:06.845938 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93e63900-68b9-4c76-b614-78dcd0862645" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.845961 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="93e63900-68b9-4c76-b614-78dcd0862645" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.846193 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="93e63900-68b9-4c76-b614-78dcd0862645" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.847032 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.850978 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.851289 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.851665 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.851883 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.870824 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"]
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.943803 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.943948 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2mrm\" (UniqueName: \"kubernetes.io/projected/f236d742-b29b-42c2-90ac-70d01657b967-kube-api-access-f2mrm\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:58:06 crc kubenswrapper[4916]: I1203 19:58:06.944012 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:58:07 crc kubenswrapper[4916]: I1203 19:58:07.045078 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2mrm\" (UniqueName: \"kubernetes.io/projected/f236d742-b29b-42c2-90ac-70d01657b967-kube-api-access-f2mrm\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:58:07 crc kubenswrapper[4916]: I1203 19:58:07.045326 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:58:07 crc kubenswrapper[4916]: I1203 19:58:07.045445 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:58:07 crc kubenswrapper[4916]: I1203 19:58:07.049940 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:58:07 crc kubenswrapper[4916]: I1203 19:58:07.051924 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:58:07 crc kubenswrapper[4916]: I1203 19:58:07.064275 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2mrm\" (UniqueName: \"kubernetes.io/projected/f236d742-b29b-42c2-90ac-70d01657b967-kube-api-access-f2mrm\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:58:07 crc kubenswrapper[4916]: I1203 19:58:07.172065 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:58:07 crc kubenswrapper[4916]: I1203 19:58:07.508392 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"]
Dec 03 19:58:07 crc kubenswrapper[4916]: I1203 19:58:07.514604 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 19:58:07 crc kubenswrapper[4916]: I1203 19:58:07.763844 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l" event={"ID":"f236d742-b29b-42c2-90ac-70d01657b967","Type":"ContainerStarted","Data":"e128e8982dc974fa1db03d3443b847409dc7055398c9a422a810ce93473cdbfe"}
Dec 03 19:58:08 crc kubenswrapper[4916]: I1203 19:58:08.060194 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-wszhq"]
Dec 03 19:58:08 crc kubenswrapper[4916]: I1203 19:58:08.064053 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-q8p4t"]
Dec 03 19:58:08 crc kubenswrapper[4916]: I1203 19:58:08.073107 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-wszhq"]
Dec 03 19:58:08 crc kubenswrapper[4916]: I1203 19:58:08.080995 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-q8p4t"]
Dec 03 19:58:08 crc kubenswrapper[4916]: I1203 19:58:08.492548 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50419ef1-71cf-4f8a-a74d-48a708e15785" path="/var/lib/kubelet/pods/50419ef1-71cf-4f8a-a74d-48a708e15785/volumes"
Dec 03 19:58:08 crc kubenswrapper[4916]: I1203 19:58:08.494321 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c646c354-3c53-407c-ae77-4af980d70094" path="/var/lib/kubelet/pods/c646c354-3c53-407c-ae77-4af980d70094/volumes"
Dec 03 19:58:08 crc kubenswrapper[4916]: I1203 19:58:08.793478 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l" event={"ID":"f236d742-b29b-42c2-90ac-70d01657b967","Type":"ContainerStarted","Data":"4a6131fdfb7a63edd61dfb7c111416f140ebf3fbdfe4b816f0a83aba255e31f5"}
Dec 03 19:58:08 crc kubenswrapper[4916]: I1203 19:58:08.817021 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l" podStartSLOduration=2.135101646 podStartE2EDuration="2.817006365s" podCreationTimestamp="2025-12-03 19:58:06 +0000 UTC" firstStartedPulling="2025-12-03 19:58:07.514373713 +0000 UTC m=+1703.477183979" lastFinishedPulling="2025-12-03 19:58:08.196278392 +0000 UTC m=+1704.159088698" observedRunningTime="2025-12-03 19:58:08.816121512 +0000 UTC m=+1704.778931818" watchObservedRunningTime="2025-12-03 19:58:08.817006365 +0000 UTC m=+1704.779816631"
Dec 03 19:58:09 crc kubenswrapper[4916]: I1203 19:58:09.034512 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-3026-account-create-update-c96hn"]
Dec 03 19:58:09 crc kubenswrapper[4916]: I1203 19:58:09.046520 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-3026-account-create-update-c96hn"]
Dec 03 19:58:09 crc kubenswrapper[4916]: I1203 19:58:09.059292 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-e728-account-create-update-wgg8d"]
Dec 03 19:58:09 crc kubenswrapper[4916]: I1203 19:58:09.070187 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-e728-account-create-update-wgg8d"]
Dec 03 19:58:10 crc kubenswrapper[4916]: I1203 19:58:10.492097 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6291ea12-342b-49d8-aa3c-671573f55c06" path="/var/lib/kubelet/pods/6291ea12-342b-49d8-aa3c-671573f55c06/volumes"
Dec 03 19:58:10 crc kubenswrapper[4916]: I1203 19:58:10.494168 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaaed8df-f339-4fc0-a76e-be13e78ef8fd" path="/var/lib/kubelet/pods/eaaed8df-f339-4fc0-a76e-be13e78ef8fd/volumes"
Dec 03 19:58:11 crc kubenswrapper[4916]: I1203 19:58:11.478592 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:58:11 crc kubenswrapper[4916]: E1203 19:58:11.478863 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:58:12 crc kubenswrapper[4916]: I1203 19:58:12.029416 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-ab84-account-create-update-7hc2j"]
Dec 03 19:58:12 crc kubenswrapper[4916]: I1203 19:58:12.041614 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-ab84-account-create-update-7hc2j"]
Dec 03 19:58:12 crc kubenswrapper[4916]: I1203 19:58:12.052591 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-hhxxm"]
Dec 03 19:58:12 crc kubenswrapper[4916]: I1203 19:58:12.062695 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-hhxxm"]
Dec 03 19:58:12 crc kubenswrapper[4916]: I1203 19:58:12.498329 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7651127-5741-4e95-8dc4-179999e506d8" path="/var/lib/kubelet/pods/e7651127-5741-4e95-8dc4-179999e506d8/volumes"
Dec 03 19:58:12 crc kubenswrapper[4916]: I1203 19:58:12.500068 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e85d895f-c6a8-45f7-a18e-369f4cd00079" path="/var/lib/kubelet/pods/e85d895f-c6a8-45f7-a18e-369f4cd00079/volumes"
Dec 03 19:58:23 crc kubenswrapper[4916]: I1203 19:58:23.479250 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:58:23 crc kubenswrapper[4916]: E1203 19:58:23.480263 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:58:34 crc kubenswrapper[4916]: I1203 19:58:34.080899 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-jn7bl"]
Dec 03 19:58:34 crc kubenswrapper[4916]: I1203 19:58:34.098982 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-jn7bl"]
Dec 03 19:58:34 crc kubenswrapper[4916]: I1203 19:58:34.497119 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c5b4292-d369-4cdf-b1c7-4da7ddf9643d" path="/var/lib/kubelet/pods/8c5b4292-d369-4cdf-b1c7-4da7ddf9643d/volumes"
Dec 03 19:58:37 crc kubenswrapper[4916]: I1203 19:58:37.479885 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:58:37 crc kubenswrapper[4916]: E1203 19:58:37.480448 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.086462 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-d242-account-create-update-nl9k8"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.101181 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-d2ls9"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.111989 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-kf9l9"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.125377 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8e13-account-create-update-ztj9z"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.137699 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-b86a-account-create-update-zjg4j"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.156600 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-8wqnf"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.185304 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-j5b5c"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.194067 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-d2ls9"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.204219 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-d242-account-create-update-nl9k8"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.217791 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8e13-account-create-update-ztj9z"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.227687 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-kf9l9"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.237225 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-b86a-account-create-update-zjg4j"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.244098 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-1496-account-create-update-zgm7f"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.250380 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-j5b5c"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.257633 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-8wqnf"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.266088 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-1496-account-create-update-zgm7f"]
Dec 03 19:58:49 crc kubenswrapper[4916]: I1203 19:58:49.477838 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:58:49 crc kubenswrapper[4916]: E1203 19:58:49.478178 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:58:50 crc kubenswrapper[4916]: I1203 19:58:50.487859 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22a8aa59-7a7f-44f7-b766-2f1648211423" path="/var/lib/kubelet/pods/22a8aa59-7a7f-44f7-b766-2f1648211423/volumes"
Dec 03 19:58:50 crc kubenswrapper[4916]: I1203 19:58:50.488589 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ef23599-1005-496a-a421-e3f4300e8b5a" path="/var/lib/kubelet/pods/2ef23599-1005-496a-a421-e3f4300e8b5a/volumes"
Dec 03 19:58:50 crc kubenswrapper[4916]: I1203 19:58:50.489111 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="321a3852-3277-44d8-a126-a8549e29d224" path="/var/lib/kubelet/pods/321a3852-3277-44d8-a126-a8549e29d224/volumes"
Dec 03 19:58:50 crc kubenswrapper[4916]: I1203 19:58:50.489636 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a211a30-fc89-4c07-afaf-e269d5ba2295" path="/var/lib/kubelet/pods/8a211a30-fc89-4c07-afaf-e269d5ba2295/volumes"
Dec 03 19:58:50 crc kubenswrapper[4916]: I1203 19:58:50.490593 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97246a98-afa5-477d-9528-19c6fd55a094" path="/var/lib/kubelet/pods/97246a98-afa5-477d-9528-19c6fd55a094/volumes"
Dec 03 19:58:50 crc kubenswrapper[4916]: I1203 19:58:50.491072 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a17100cf-50d9-4a21-8d10-b1e49808fe53" path="/var/lib/kubelet/pods/a17100cf-50d9-4a21-8d10-b1e49808fe53/volumes"
Dec 03 19:58:50 crc kubenswrapper[4916]: I1203 19:58:50.491549 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca" path="/var/lib/kubelet/pods/a9a023c2-00ce-4d7b-a9cc-b35f77cc39ca/volumes"
Dec 03 19:58:50 crc kubenswrapper[4916]: I1203 19:58:50.492770 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9622e2e-7d39-4ec8-b6fc-580eee868216" path="/var/lib/kubelet/pods/d9622e2e-7d39-4ec8-b6fc-580eee868216/volumes"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.311924 4916 scope.go:117] "RemoveContainer" containerID="aba7d8ec5fb77b04be08f46e58d1616e2b916a918a31dd367327793d4f6952c7"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.352742 4916 scope.go:117] "RemoveContainer" containerID="3e88fafc9520cb9cb1417d38f0f7fd7eeb8b2f6da42e889aae28e92b2936dad2"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.419045 4916 scope.go:117] "RemoveContainer" containerID="29fc3d06133cde71b2a9f2376c331c9a8355540d0837281afd8c8404a15199eb"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.483237 4916 scope.go:117] "RemoveContainer" containerID="03dedab6881211d06323fe2a0f89529ee55f7ce2774f2e20f01d55e629d98895"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.563779 4916 scope.go:117] "RemoveContainer" containerID="1d55eef93911096e6d3efc48bf0bccf325e4086b99630d17caf739f48d45e7b3"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.603406 4916 scope.go:117] "RemoveContainer" containerID="5a77c14219fbf503d79ba40e11ef71d09df2161a77e9e5e6b61bf675bc73f9a6"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.647062 4916 scope.go:117] "RemoveContainer" containerID="176fbe21f4e8f33f27179fe19cd6a1b4a4173d0d4f30f7180a97e7d2448063d2"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.669340 4916 scope.go:117] "RemoveContainer" containerID="931ed90e3056a1e6708aa10dcac847f0dd8259eb6e4f7ebf9b8f35a3706805af"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.690229 4916 scope.go:117] "RemoveContainer" containerID="135964568bf6aabb64a18fc6920a2debd03b0df10a4e8139294c4e3d5168c572"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.720828 4916 scope.go:117] "RemoveContainer" containerID="38d4e6a1a5d77e6b6322fc12c44975a22dcf3485adacd38ae9acbd4be3e193dc"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.744068 4916 scope.go:117] "RemoveContainer" containerID="22bbbf064561daffc929f3b601affb8e9984182f7199e9b2e28f3c189a440968"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.768171 4916 scope.go:117] "RemoveContainer" containerID="4ace83561891847911640b21726dc52ec0f4e20225f05450c0c3bc9fe796129a"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.792844 4916 scope.go:117] "RemoveContainer" containerID="20268ae426aac796e7f96a8d537347861fb149c9a7ff8e5f5579ffdc9d0f8b32"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.824033 4916 scope.go:117] "RemoveContainer" containerID="72c2b057142a7c630bdcfa408aaca20533cfe49d30e66cb39f40bde8787e777e"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.856298 4916 scope.go:117] "RemoveContainer" containerID="7fd987c0f21cff9797a9b36b54de4c59cc99415a30ca78b04a004b25038f0294"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.896403 4916 scope.go:117] "RemoveContainer" containerID="3b5a058e96668f6b872e5a7e9fa56bb9461f0a2729095081a373414b52ab9699"
Dec 03 19:58:51 crc kubenswrapper[4916]: I1203 19:58:51.932533 4916 scope.go:117] "RemoveContainer" containerID="845fa2c517ce949fb569fa0b08ea12fa5c8b19ea64af16121965f23ae2b1f734"
Dec 03 19:58:54 crc kubenswrapper[4916]: I1203 19:58:54.041765 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-vzcmc"]
Dec 03 19:58:54 crc kubenswrapper[4916]: I1203 19:58:54.060167 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-vzcmc"]
Dec 03 19:58:54 crc kubenswrapper[4916]: I1203 19:58:54.489520 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94bf33bd-9ef5-41b9-820b-63fd78b3a384" path="/var/lib/kubelet/pods/94bf33bd-9ef5-41b9-820b-63fd78b3a384/volumes"
Dec 03 19:59:01 crc kubenswrapper[4916]: I1203 19:59:01.478477 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:59:01 crc kubenswrapper[4916]: E1203 19:59:01.479591 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:59:15 crc kubenswrapper[4916]: I1203 19:59:15.478550 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:59:15 crc kubenswrapper[4916]: E1203 19:59:15.480104 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:59:29 crc kubenswrapper[4916]: I1203 19:59:29.486198 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:59:29 crc kubenswrapper[4916]: E1203 19:59:29.486997 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:59:38 crc kubenswrapper[4916]: I1203 19:59:38.062793 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-xct2g"]
Dec 03 19:59:38 crc kubenswrapper[4916]: I1203 19:59:38.075450 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-mvxxd"]
Dec 03 19:59:38 crc kubenswrapper[4916]: I1203 19:59:38.084250 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-j8v5b"]
Dec 03 19:59:38 crc kubenswrapper[4916]: I1203 19:59:38.093124 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-xct2g"]
Dec 03 19:59:38 crc kubenswrapper[4916]: I1203 19:59:38.102464 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-mvxxd"]
Dec 03 19:59:38 crc kubenswrapper[4916]: I1203 19:59:38.114871 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-j8v5b"]
Dec 03 19:59:38 crc kubenswrapper[4916]: I1203 19:59:38.490017 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2377ccfa-eef8-4809-993d-28cf0320206a" path="/var/lib/kubelet/pods/2377ccfa-eef8-4809-993d-28cf0320206a/volumes"
Dec 03 19:59:38 crc kubenswrapper[4916]: I1203 19:59:38.490660 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a166793-92a9-4ad3-95cb-1743fe20d361" path="/var/lib/kubelet/pods/4a166793-92a9-4ad3-95cb-1743fe20d361/volumes"
Dec 03 19:59:38 crc kubenswrapper[4916]: I1203 19:59:38.491173 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64ae3277-4d93-4a36-ba5a-9913bb3e58d7" path="/var/lib/kubelet/pods/64ae3277-4d93-4a36-ba5a-9913bb3e58d7/volumes"
Dec 03 19:59:40 crc kubenswrapper[4916]: I1203 19:59:40.478792 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72"
Dec 03 19:59:40 crc kubenswrapper[4916]: E1203 19:59:40.479793 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 19:59:42 crc kubenswrapper[4916]: I1203 19:59:42.029437 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-lhffz"]
Dec 03 19:59:42 crc kubenswrapper[4916]: I1203 19:59:42.038270 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-lhffz"]
Dec 03 19:59:42 crc kubenswrapper[4916]: I1203 19:59:42.490746 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3c82cbe-38cf-4d4c-b18c-9b296776cb5c" path="/var/lib/kubelet/pods/d3c82cbe-38cf-4d4c-b18c-9b296776cb5c/volumes"
Dec 03 19:59:47 crc kubenswrapper[4916]: I1203 19:59:47.893146 4916 generic.go:334] "Generic (PLEG): container finished" podID="f236d742-b29b-42c2-90ac-70d01657b967" containerID="4a6131fdfb7a63edd61dfb7c111416f140ebf3fbdfe4b816f0a83aba255e31f5" exitCode=0
Dec 03 19:59:47 crc kubenswrapper[4916]: I1203 19:59:47.893266 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l" event={"ID":"f236d742-b29b-42c2-90ac-70d01657b967","Type":"ContainerDied","Data":"4a6131fdfb7a63edd61dfb7c111416f140ebf3fbdfe4b816f0a83aba255e31f5"}
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.379733 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.514995 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2mrm\" (UniqueName: \"kubernetes.io/projected/f236d742-b29b-42c2-90ac-70d01657b967-kube-api-access-f2mrm\") pod \"f236d742-b29b-42c2-90ac-70d01657b967\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") "
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.515096 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-ssh-key\") pod \"f236d742-b29b-42c2-90ac-70d01657b967\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") "
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.515181 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-inventory\") pod \"f236d742-b29b-42c2-90ac-70d01657b967\" (UID: \"f236d742-b29b-42c2-90ac-70d01657b967\") "
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.520706 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f236d742-b29b-42c2-90ac-70d01657b967-kube-api-access-f2mrm" (OuterVolumeSpecName: "kube-api-access-f2mrm") pod "f236d742-b29b-42c2-90ac-70d01657b967" (UID: "f236d742-b29b-42c2-90ac-70d01657b967"). InnerVolumeSpecName "kube-api-access-f2mrm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.540957 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-inventory" (OuterVolumeSpecName: "inventory") pod "f236d742-b29b-42c2-90ac-70d01657b967" (UID: "f236d742-b29b-42c2-90ac-70d01657b967"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.543238 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f236d742-b29b-42c2-90ac-70d01657b967" (UID: "f236d742-b29b-42c2-90ac-70d01657b967"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.617940 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2mrm\" (UniqueName: \"kubernetes.io/projected/f236d742-b29b-42c2-90ac-70d01657b967-kube-api-access-f2mrm\") on node \"crc\" DevicePath \"\""
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.617997 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.618016 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f236d742-b29b-42c2-90ac-70d01657b967-inventory\") on node \"crc\" DevicePath \"\""
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.921029 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l" event={"ID":"f236d742-b29b-42c2-90ac-70d01657b967","Type":"ContainerDied","Data":"e128e8982dc974fa1db03d3443b847409dc7055398c9a422a810ce93473cdbfe"}
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.921084 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e128e8982dc974fa1db03d3443b847409dc7055398c9a422a810ce93473cdbfe"
Dec 03 19:59:49 crc kubenswrapper[4916]: I1203 19:59:49.921095 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.025150 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v"]
Dec 03 19:59:50 crc kubenswrapper[4916]: E1203 19:59:50.025936 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f236d742-b29b-42c2-90ac-70d01657b967" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.025958 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f236d742-b29b-42c2-90ac-70d01657b967" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.026124 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f236d742-b29b-42c2-90ac-70d01657b967" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.027393 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.029313 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.029418 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.030199 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.030482 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.036835 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v"]
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.126812 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-v727v\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.126910 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-v727v\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.126943 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6xtp\" (UniqueName: \"kubernetes.io/projected/8d641422-c093-42d7-bc60-6df1dd5b0796-kube-api-access-s6xtp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-v727v\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.228871 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-v727v\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.229006 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-v727v\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v"
Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.229047 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6xtp\" (UniqueName: \"kubernetes.io/projected/8d641422-c093-42d7-bc60-6df1dd5b0796-kube-api-access-s6xtp\")
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-v727v\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.233240 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-v727v\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.244591 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-v727v\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.247862 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6xtp\" (UniqueName: \"kubernetes.io/projected/8d641422-c093-42d7-bc60-6df1dd5b0796-kube-api-access-s6xtp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-v727v\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.387162 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.885463 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v"] Dec 03 19:59:50 crc kubenswrapper[4916]: I1203 19:59:50.931084 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" event={"ID":"8d641422-c093-42d7-bc60-6df1dd5b0796","Type":"ContainerStarted","Data":"2ad4b0da75f5d53479f3d1505e310995ee945f707bd39f2d99f0f3fb2b8b9c21"} Dec 03 19:59:51 crc kubenswrapper[4916]: I1203 19:59:51.940094 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" event={"ID":"8d641422-c093-42d7-bc60-6df1dd5b0796","Type":"ContainerStarted","Data":"cac19df96568fa720cf8087719c27149967cd6ea001cdec2aabf7d211c31724c"} Dec 03 19:59:51 crc kubenswrapper[4916]: I1203 19:59:51.959165 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" podStartSLOduration=2.486959243 podStartE2EDuration="2.959146006s" podCreationTimestamp="2025-12-03 19:59:49 +0000 UTC" firstStartedPulling="2025-12-03 19:59:50.888047163 +0000 UTC m=+1806.850857449" lastFinishedPulling="2025-12-03 19:59:51.360233946 +0000 UTC m=+1807.323044212" observedRunningTime="2025-12-03 19:59:51.955366185 +0000 UTC m=+1807.918176461" watchObservedRunningTime="2025-12-03 19:59:51.959146006 +0000 UTC m=+1807.921956272" Dec 03 19:59:52 crc kubenswrapper[4916]: I1203 19:59:52.258190 4916 scope.go:117] "RemoveContainer" containerID="16855b2506ae4e533e78fe358d7fb9eb2644242928a53b6ae75b020d6f6aff58" Dec 03 19:59:52 crc kubenswrapper[4916]: I1203 19:59:52.288259 4916 scope.go:117] "RemoveContainer" 
containerID="84f1626ea5cf3e3cbd1d94f33ef8c72b5c7d1c136b59a64cdf8764c511f3ea9d" Dec 03 19:59:52 crc kubenswrapper[4916]: I1203 19:59:52.339740 4916 scope.go:117] "RemoveContainer" containerID="c140bef391449e52f103aefaf68bfcfdab5c883b0a860a104a7e44334a63f96e" Dec 03 19:59:52 crc kubenswrapper[4916]: I1203 19:59:52.411929 4916 scope.go:117] "RemoveContainer" containerID="d674940fced82d2aa961795cf7e868b9e49b8545a540155c283aea1156164fb8" Dec 03 19:59:52 crc kubenswrapper[4916]: I1203 19:59:52.449107 4916 scope.go:117] "RemoveContainer" containerID="41b13d31b1ca8af0cbcc8ee6cce215cd6a431c652bf14b78cf3e2d872fe7e0fb" Dec 03 19:59:54 crc kubenswrapper[4916]: I1203 19:59:54.486205 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72" Dec 03 19:59:54 crc kubenswrapper[4916]: I1203 19:59:54.979691 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"e13c1fbfb649ac0f9340740b2db42f1899368837b532c074802cbbdf37483fd4"} Dec 03 19:59:55 crc kubenswrapper[4916]: I1203 19:59:55.056245 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-q6cjh"] Dec 03 19:59:55 crc kubenswrapper[4916]: I1203 19:59:55.070754 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-q6cjh"] Dec 03 19:59:56 crc kubenswrapper[4916]: I1203 19:59:56.494558 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d5347c1-1439-4284-977d-390912ffe9a5" path="/var/lib/kubelet/pods/0d5347c1-1439-4284-977d-390912ffe9a5/volumes" Dec 03 19:59:57 crc kubenswrapper[4916]: I1203 19:59:57.037988 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-l4sh2"] Dec 03 19:59:57 crc kubenswrapper[4916]: I1203 19:59:57.049765 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-l4sh2"] Dec 03 19:59:58 crc kubenswrapper[4916]: I1203 19:59:58.491227 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eac938e-d147-4214-a0b1-4a17ac69b649" path="/var/lib/kubelet/pods/7eac938e-d147-4214-a0b1-4a17ac69b649/volumes" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.161226 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf"] Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.164012 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.172733 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.172738 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.189706 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf"] Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.232317 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcw6b\" (UniqueName: \"kubernetes.io/projected/d4c152c6-ef21-44f7-b910-3299502de72a-kube-api-access-pcw6b\") pod \"collect-profiles-29413200-xcmqf\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.232382 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4c152c6-ef21-44f7-b910-3299502de72a-secret-volume\") pod \"collect-profiles-29413200-xcmqf\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.232497 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4c152c6-ef21-44f7-b910-3299502de72a-config-volume\") pod \"collect-profiles-29413200-xcmqf\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.334618 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4c152c6-ef21-44f7-b910-3299502de72a-config-volume\") pod \"collect-profiles-29413200-xcmqf\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.334824 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcw6b\" (UniqueName: \"kubernetes.io/projected/d4c152c6-ef21-44f7-b910-3299502de72a-kube-api-access-pcw6b\") pod \"collect-profiles-29413200-xcmqf\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.334852 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4c152c6-ef21-44f7-b910-3299502de72a-secret-volume\") pod \"collect-profiles-29413200-xcmqf\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.335627 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4c152c6-ef21-44f7-b910-3299502de72a-config-volume\") pod 
\"collect-profiles-29413200-xcmqf\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.351254 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4c152c6-ef21-44f7-b910-3299502de72a-secret-volume\") pod \"collect-profiles-29413200-xcmqf\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.356327 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcw6b\" (UniqueName: \"kubernetes.io/projected/d4c152c6-ef21-44f7-b910-3299502de72a-kube-api-access-pcw6b\") pod \"collect-profiles-29413200-xcmqf\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.490304 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:00 crc kubenswrapper[4916]: I1203 20:00:00.983067 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf"] Dec 03 20:00:01 crc kubenswrapper[4916]: I1203 20:00:01.054595 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" event={"ID":"d4c152c6-ef21-44f7-b910-3299502de72a","Type":"ContainerStarted","Data":"a14d343d5e8b7ca2b36c74fec10b158ae8f8fa54bf6d1219462f64f3c629cade"} Dec 03 20:00:02 crc kubenswrapper[4916]: I1203 20:00:02.070025 4916 generic.go:334] "Generic (PLEG): container finished" podID="d4c152c6-ef21-44f7-b910-3299502de72a" containerID="9f608ec231c183b8e79ab4b193eb4d382c013849808fe5932e2a0f82953031f7" exitCode=0 Dec 03 20:00:02 crc kubenswrapper[4916]: I1203 20:00:02.070243 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" event={"ID":"d4c152c6-ef21-44f7-b910-3299502de72a","Type":"ContainerDied","Data":"9f608ec231c183b8e79ab4b193eb4d382c013849808fe5932e2a0f82953031f7"} Dec 03 20:00:03 crc kubenswrapper[4916]: I1203 20:00:03.456021 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:03 crc kubenswrapper[4916]: I1203 20:00:03.538398 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4c152c6-ef21-44f7-b910-3299502de72a-secret-volume\") pod \"d4c152c6-ef21-44f7-b910-3299502de72a\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " Dec 03 20:00:03 crc kubenswrapper[4916]: I1203 20:00:03.538649 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcw6b\" (UniqueName: \"kubernetes.io/projected/d4c152c6-ef21-44f7-b910-3299502de72a-kube-api-access-pcw6b\") pod \"d4c152c6-ef21-44f7-b910-3299502de72a\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " Dec 03 20:00:03 crc kubenswrapper[4916]: I1203 20:00:03.538688 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4c152c6-ef21-44f7-b910-3299502de72a-config-volume\") pod \"d4c152c6-ef21-44f7-b910-3299502de72a\" (UID: \"d4c152c6-ef21-44f7-b910-3299502de72a\") " Dec 03 20:00:03 crc kubenswrapper[4916]: I1203 20:00:03.539354 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4c152c6-ef21-44f7-b910-3299502de72a-config-volume" (OuterVolumeSpecName: "config-volume") pod "d4c152c6-ef21-44f7-b910-3299502de72a" (UID: "d4c152c6-ef21-44f7-b910-3299502de72a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 20:00:03 crc kubenswrapper[4916]: I1203 20:00:03.555975 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4c152c6-ef21-44f7-b910-3299502de72a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d4c152c6-ef21-44f7-b910-3299502de72a" (UID: "d4c152c6-ef21-44f7-b910-3299502de72a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:00:03 crc kubenswrapper[4916]: I1203 20:00:03.556015 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4c152c6-ef21-44f7-b910-3299502de72a-kube-api-access-pcw6b" (OuterVolumeSpecName: "kube-api-access-pcw6b") pod "d4c152c6-ef21-44f7-b910-3299502de72a" (UID: "d4c152c6-ef21-44f7-b910-3299502de72a"). InnerVolumeSpecName "kube-api-access-pcw6b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:00:03 crc kubenswrapper[4916]: I1203 20:00:03.641302 4916 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d4c152c6-ef21-44f7-b910-3299502de72a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 20:00:03 crc kubenswrapper[4916]: I1203 20:00:03.641335 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcw6b\" (UniqueName: \"kubernetes.io/projected/d4c152c6-ef21-44f7-b910-3299502de72a-kube-api-access-pcw6b\") on node \"crc\" DevicePath \"\"" Dec 03 20:00:03 crc kubenswrapper[4916]: I1203 20:00:03.641346 4916 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d4c152c6-ef21-44f7-b910-3299502de72a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 20:00:04 crc kubenswrapper[4916]: I1203 20:00:04.094274 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" event={"ID":"d4c152c6-ef21-44f7-b910-3299502de72a","Type":"ContainerDied","Data":"a14d343d5e8b7ca2b36c74fec10b158ae8f8fa54bf6d1219462f64f3c629cade"} Dec 03 20:00:04 crc kubenswrapper[4916]: I1203 20:00:04.094317 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a14d343d5e8b7ca2b36c74fec10b158ae8f8fa54bf6d1219462f64f3c629cade" Dec 03 20:00:04 crc kubenswrapper[4916]: I1203 20:00:04.094362 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf" Dec 03 20:00:36 crc kubenswrapper[4916]: I1203 20:00:36.068873 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-mjjt5"] Dec 03 20:00:36 crc kubenswrapper[4916]: I1203 20:00:36.087601 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-5sdxp"] Dec 03 20:00:36 crc kubenswrapper[4916]: I1203 20:00:36.101122 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-mjjt5"] Dec 03 20:00:36 crc kubenswrapper[4916]: I1203 20:00:36.110882 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-5sdxp"] Dec 03 20:00:36 crc kubenswrapper[4916]: I1203 20:00:36.498550 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a20edfd-dcdf-4b70-aa9a-c930b6210dcd" path="/var/lib/kubelet/pods/7a20edfd-dcdf-4b70-aa9a-c930b6210dcd/volumes" Dec 03 20:00:36 crc kubenswrapper[4916]: I1203 20:00:36.501292 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e" path="/var/lib/kubelet/pods/cbfeee7e-2dd3-44b2-bd95-bcde8f377e1e/volumes" Dec 03 20:00:37 crc kubenswrapper[4916]: I1203 20:00:37.044927 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-e24b-account-create-update-fcl4t"] Dec 03 20:00:37 crc kubenswrapper[4916]: I1203 20:00:37.063615 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-6491-account-create-update-rn5pw"] Dec 03 20:00:37 crc kubenswrapper[4916]: I1203 20:00:37.075725 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-8bs7m"] Dec 03 20:00:37 crc kubenswrapper[4916]: I1203 20:00:37.084776 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-4786-account-create-update-fl47z"] Dec 03 20:00:37 crc kubenswrapper[4916]: I1203 20:00:37.091372 
4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-e24b-account-create-update-fcl4t"] Dec 03 20:00:37 crc kubenswrapper[4916]: I1203 20:00:37.097681 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-8bs7m"] Dec 03 20:00:37 crc kubenswrapper[4916]: I1203 20:00:37.104421 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-6491-account-create-update-rn5pw"] Dec 03 20:00:37 crc kubenswrapper[4916]: I1203 20:00:37.110431 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-4786-account-create-update-fl47z"] Dec 03 20:00:38 crc kubenswrapper[4916]: I1203 20:00:38.495765 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="482fafd3-d1fd-4235-888d-aa645bdaa1e3" path="/var/lib/kubelet/pods/482fafd3-d1fd-4235-888d-aa645bdaa1e3/volumes" Dec 03 20:00:38 crc kubenswrapper[4916]: I1203 20:00:38.497842 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="523ba5d4-842e-4726-8b66-813508d7a9d2" path="/var/lib/kubelet/pods/523ba5d4-842e-4726-8b66-813508d7a9d2/volumes" Dec 03 20:00:38 crc kubenswrapper[4916]: I1203 20:00:38.499332 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97f0e085-66c0-48ab-b023-6a4d50e08683" path="/var/lib/kubelet/pods/97f0e085-66c0-48ab-b023-6a4d50e08683/volumes" Dec 03 20:00:38 crc kubenswrapper[4916]: I1203 20:00:38.501025 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5555e47-dacf-4ef9-80a4-a1bbd57dca1a" path="/var/lib/kubelet/pods/f5555e47-dacf-4ef9-80a4-a1bbd57dca1a/volumes" Dec 03 20:00:52 crc kubenswrapper[4916]: I1203 20:00:52.593078 4916 scope.go:117] "RemoveContainer" containerID="90b20d3334567229aba4e98e42071ef308818e8ba18253da6b66ae22e934cf35" Dec 03 20:00:52 crc kubenswrapper[4916]: I1203 20:00:52.643646 4916 scope.go:117] "RemoveContainer" containerID="14acda667d359a2b662693c049ea2e397bccda516dfa3a5264a4348972804bd5" Dec 03 20:00:52 crc kubenswrapper[4916]: I1203 20:00:52.665850 4916 scope.go:117] "RemoveContainer" containerID="20a35aba4807ea23c2ccad2b4be130ab685a2f3d08a66d1e32e4dfb8fa992b8d" Dec 03 20:00:52 crc kubenswrapper[4916]: I1203 20:00:52.722497 4916 scope.go:117] "RemoveContainer" containerID="b49a93ae7a4d362c6b300c8bdc9910e9377074a5101adbb6faaf57b411f316f6" Dec 03 20:00:52 crc kubenswrapper[4916]: I1203 20:00:52.774906 4916 scope.go:117] "RemoveContainer" containerID="a08fbda909c28e16479d4ce436f4f27a71a4d4a5bca53129f2755744aca4295e" Dec 03 20:00:52 crc kubenswrapper[4916]: I1203 20:00:52.806343 4916 scope.go:117] "RemoveContainer" containerID="4ce90c5a72df412f20c1ccc631511f59485ef147810e3d59b97ebcb6a821ce59" Dec 03 20:00:52 crc kubenswrapper[4916]: I1203 20:00:52.855973 4916 scope.go:117] "RemoveContainer" containerID="179b160895dee461f15502b215cfd6dfc04c096f3b758bab8073ab8293291d2c" Dec 03 20:00:52 crc kubenswrapper[4916]: I1203 20:00:52.877409 4916 scope.go:117] "RemoveContainer" containerID="0b4e4dddabc7f02d77448e14bc4c95b11fc7fd86b91c67eca6925a68ef79d8d7" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.159755 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29413201-56f24"] Dec 03 20:01:00 crc kubenswrapper[4916]: E1203 20:01:00.160818 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4c152c6-ef21-44f7-b910-3299502de72a" containerName="collect-profiles" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.160837 4916 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="d4c152c6-ef21-44f7-b910-3299502de72a" containerName="collect-profiles" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.161030 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4c152c6-ef21-44f7-b910-3299502de72a" containerName="collect-profiles" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.161716 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.179905 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29413201-56f24"] Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.292218 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-config-data\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.292275 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-combined-ca-bundle\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.292326 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-fernet-keys\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.292357 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssphc\" (UniqueName: \"kubernetes.io/projected/f303570c-cd6a-4249-9f85-dda22c04e2a7-kube-api-access-ssphc\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.394450 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-fernet-keys\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.394519 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssphc\" (UniqueName: \"kubernetes.io/projected/f303570c-cd6a-4249-9f85-dda22c04e2a7-kube-api-access-ssphc\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.394682 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-config-data\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.394719 4916 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-combined-ca-bundle\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.401849 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-config-data\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.401902 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-combined-ca-bundle\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.406832 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-fernet-keys\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.416254 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssphc\" (UniqueName: \"kubernetes.io/projected/f303570c-cd6a-4249-9f85-dda22c04e2a7-kube-api-access-ssphc\") pod \"keystone-cron-29413201-56f24\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.483706 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:00 crc kubenswrapper[4916]: I1203 20:01:00.823793 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29413201-56f24"] Dec 03 20:01:01 crc kubenswrapper[4916]: I1203 20:01:01.744231 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29413201-56f24" event={"ID":"f303570c-cd6a-4249-9f85-dda22c04e2a7","Type":"ContainerStarted","Data":"cb7d9d4fb9d3d71a22783d66d50bee6d2dc7c06a556bdd667f5cbc387f3a0f16"} Dec 03 20:01:01 crc kubenswrapper[4916]: I1203 20:01:01.744684 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29413201-56f24" event={"ID":"f303570c-cd6a-4249-9f85-dda22c04e2a7","Type":"ContainerStarted","Data":"85280875d42959fa5aaa4fb8a5acb1b9324e225b1d86cd67bb870698bf801e7a"} Dec 03 20:01:01 crc kubenswrapper[4916]: I1203 20:01:01.766062 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29413201-56f24" podStartSLOduration=1.7660358 podStartE2EDuration="1.7660358s" podCreationTimestamp="2025-12-03 20:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:01:01.760222433 +0000 UTC m=+1877.723032709" watchObservedRunningTime="2025-12-03 20:01:01.7660358 +0000 UTC m=+1877.728846086" Dec 03 20:01:03 crc kubenswrapper[4916]: I1203 20:01:03.048362 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5vk6b"] Dec 03 20:01:03 crc kubenswrapper[4916]: I1203 20:01:03.056782 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-5vk6b"] Dec 03 20:01:03 crc kubenswrapper[4916]: I1203 20:01:03.770469 4916 generic.go:334] "Generic (PLEG): container finished" podID="f303570c-cd6a-4249-9f85-dda22c04e2a7" containerID="cb7d9d4fb9d3d71a22783d66d50bee6d2dc7c06a556bdd667f5cbc387f3a0f16" exitCode=0 Dec 03 20:01:03 crc kubenswrapper[4916]: I1203 20:01:03.770517 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29413201-56f24" event={"ID":"f303570c-cd6a-4249-9f85-dda22c04e2a7","Type":"ContainerDied","Data":"cb7d9d4fb9d3d71a22783d66d50bee6d2dc7c06a556bdd667f5cbc387f3a0f16"} Dec 03 20:01:04 crc kubenswrapper[4916]: I1203 20:01:04.488697 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05ab20e9-de0e-4f40-aa4b-a3b685fe9712" path="/var/lib/kubelet/pods/05ab20e9-de0e-4f40-aa4b-a3b685fe9712/volumes" Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.202301 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.319476 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-combined-ca-bundle\") pod \"f303570c-cd6a-4249-9f85-dda22c04e2a7\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.319636 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-config-data\") pod \"f303570c-cd6a-4249-9f85-dda22c04e2a7\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.319670 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-fernet-keys\") pod \"f303570c-cd6a-4249-9f85-dda22c04e2a7\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.319689 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssphc\" (UniqueName: \"kubernetes.io/projected/f303570c-cd6a-4249-9f85-dda22c04e2a7-kube-api-access-ssphc\") pod \"f303570c-cd6a-4249-9f85-dda22c04e2a7\" (UID: \"f303570c-cd6a-4249-9f85-dda22c04e2a7\") " Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.325531 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f303570c-cd6a-4249-9f85-dda22c04e2a7" (UID: "f303570c-cd6a-4249-9f85-dda22c04e2a7"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.326111 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f303570c-cd6a-4249-9f85-dda22c04e2a7-kube-api-access-ssphc" (OuterVolumeSpecName: "kube-api-access-ssphc") pod "f303570c-cd6a-4249-9f85-dda22c04e2a7" (UID: "f303570c-cd6a-4249-9f85-dda22c04e2a7"). InnerVolumeSpecName "kube-api-access-ssphc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.350420 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f303570c-cd6a-4249-9f85-dda22c04e2a7" (UID: "f303570c-cd6a-4249-9f85-dda22c04e2a7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.394347 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-config-data" (OuterVolumeSpecName: "config-data") pod "f303570c-cd6a-4249-9f85-dda22c04e2a7" (UID: "f303570c-cd6a-4249-9f85-dda22c04e2a7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.423155 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.423183 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.423192 4916 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f303570c-cd6a-4249-9f85-dda22c04e2a7-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.423200 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssphc\" (UniqueName: \"kubernetes.io/projected/f303570c-cd6a-4249-9f85-dda22c04e2a7-kube-api-access-ssphc\") on node \"crc\" DevicePath \"\"" Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.795453 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29413201-56f24" event={"ID":"f303570c-cd6a-4249-9f85-dda22c04e2a7","Type":"ContainerDied","Data":"85280875d42959fa5aaa4fb8a5acb1b9324e225b1d86cd67bb870698bf801e7a"} Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.795503 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85280875d42959fa5aaa4fb8a5acb1b9324e225b1d86cd67bb870698bf801e7a" Dec 03 20:01:05 crc kubenswrapper[4916]: I1203 20:01:05.795553 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29413201-56f24" Dec 03 20:01:13 crc kubenswrapper[4916]: I1203 20:01:13.882168 4916 generic.go:334] "Generic (PLEG): container finished" podID="8d641422-c093-42d7-bc60-6df1dd5b0796" containerID="cac19df96568fa720cf8087719c27149967cd6ea001cdec2aabf7d211c31724c" exitCode=0 Dec 03 20:01:13 crc kubenswrapper[4916]: I1203 20:01:13.882278 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" event={"ID":"8d641422-c093-42d7-bc60-6df1dd5b0796","Type":"ContainerDied","Data":"cac19df96568fa720cf8087719c27149967cd6ea001cdec2aabf7d211c31724c"} Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.445131 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.539508 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-ssh-key\") pod \"8d641422-c093-42d7-bc60-6df1dd5b0796\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.539723 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6xtp\" (UniqueName: \"kubernetes.io/projected/8d641422-c093-42d7-bc60-6df1dd5b0796-kube-api-access-s6xtp\") pod \"8d641422-c093-42d7-bc60-6df1dd5b0796\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.539888 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-inventory\") pod \"8d641422-c093-42d7-bc60-6df1dd5b0796\" (UID: \"8d641422-c093-42d7-bc60-6df1dd5b0796\") " Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.548007 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d641422-c093-42d7-bc60-6df1dd5b0796-kube-api-access-s6xtp" (OuterVolumeSpecName: "kube-api-access-s6xtp") pod "8d641422-c093-42d7-bc60-6df1dd5b0796" (UID: "8d641422-c093-42d7-bc60-6df1dd5b0796"). InnerVolumeSpecName "kube-api-access-s6xtp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.575808 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-inventory" (OuterVolumeSpecName: "inventory") pod "8d641422-c093-42d7-bc60-6df1dd5b0796" (UID: "8d641422-c093-42d7-bc60-6df1dd5b0796"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.579851 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8d641422-c093-42d7-bc60-6df1dd5b0796" (UID: "8d641422-c093-42d7-bc60-6df1dd5b0796"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.642768 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6xtp\" (UniqueName: \"kubernetes.io/projected/8d641422-c093-42d7-bc60-6df1dd5b0796-kube-api-access-s6xtp\") on node \"crc\" DevicePath \"\"" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.642811 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.642824 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d641422-c093-42d7-bc60-6df1dd5b0796-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.901643 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" event={"ID":"8d641422-c093-42d7-bc60-6df1dd5b0796","Type":"ContainerDied","Data":"2ad4b0da75f5d53479f3d1505e310995ee945f707bd39f2d99f0f3fb2b8b9c21"} Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.901676 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-v727v" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.901691 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ad4b0da75f5d53479f3d1505e310995ee945f707bd39f2d99f0f3fb2b8b9c21" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.980840 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h"] Dec 03 20:01:15 crc kubenswrapper[4916]: E1203 20:01:15.981287 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d641422-c093-42d7-bc60-6df1dd5b0796" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.981303 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d641422-c093-42d7-bc60-6df1dd5b0796" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 20:01:15 crc kubenswrapper[4916]: E1203 20:01:15.981314 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f303570c-cd6a-4249-9f85-dda22c04e2a7" containerName="keystone-cron" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.981323 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f303570c-cd6a-4249-9f85-dda22c04e2a7" containerName="keystone-cron" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.981555 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f303570c-cd6a-4249-9f85-dda22c04e2a7" containerName="keystone-cron" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.981597 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d641422-c093-42d7-bc60-6df1dd5b0796" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.982473 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.985938 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.986175 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.986219 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.992017 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 20:01:15 crc kubenswrapper[4916]: I1203 20:01:15.992690 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h"] Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.154245 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktpjn\" (UniqueName: \"kubernetes.io/projected/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-kube-api-access-ktpjn\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.154315 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.154598 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.257003 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktpjn\" (UniqueName: \"kubernetes.io/projected/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-kube-api-access-ktpjn\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.257083 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.257151 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.260820 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.264846 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.286864 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktpjn\" (UniqueName: \"kubernetes.io/projected/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-kube-api-access-ktpjn\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.316468 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.864397 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h"] Dec 03 20:01:16 crc kubenswrapper[4916]: I1203 20:01:16.911619 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" event={"ID":"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a","Type":"ContainerStarted","Data":"a4e44324ab44b7d05a40ea5d59a3ba16d07096ddfb8153f83935fca9e56b646e"} Dec 03 20:01:17 crc kubenswrapper[4916]: I1203 20:01:17.928442 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" event={"ID":"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a","Type":"ContainerStarted","Data":"154119057f0dbb01ae4cbc88769513bf63f4400e0a65ddfee18f0e457ac349f0"} Dec 03 20:01:17 crc kubenswrapper[4916]: I1203 20:01:17.986546 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" podStartSLOduration=2.321385922 podStartE2EDuration="2.986522183s" podCreationTimestamp="2025-12-03 20:01:15 +0000 UTC" firstStartedPulling="2025-12-03 20:01:16.86808971 +0000 UTC m=+1892.830899976" lastFinishedPulling="2025-12-03 20:01:17.533225951 +0000 UTC m=+1893.496036237" observedRunningTime="2025-12-03 20:01:17.956159487 +0000 UTC m=+1893.918969843" watchObservedRunningTime="2025-12-03 20:01:17.986522183 +0000 UTC m=+1893.949332469" Dec 03 20:01:24 crc kubenswrapper[4916]: I1203 20:01:24.003349 4916 generic.go:334] "Generic (PLEG): container finished" podID="d64868ee-2aa9-48b3-bfd7-895a9daf8c5a" containerID="154119057f0dbb01ae4cbc88769513bf63f4400e0a65ddfee18f0e457ac349f0" exitCode=0 Dec 03 20:01:24 crc kubenswrapper[4916]: I1203 
20:01:24.003460 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" event={"ID":"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a","Type":"ContainerDied","Data":"154119057f0dbb01ae4cbc88769513bf63f4400e0a65ddfee18f0e457ac349f0"} Dec 03 20:01:25 crc kubenswrapper[4916]: I1203 20:01:25.460401 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:25 crc kubenswrapper[4916]: I1203 20:01:25.554898 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-ssh-key\") pod \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " Dec 03 20:01:25 crc kubenswrapper[4916]: I1203 20:01:25.555003 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-inventory\") pod \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " Dec 03 20:01:25 crc kubenswrapper[4916]: I1203 20:01:25.555070 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktpjn\" (UniqueName: \"kubernetes.io/projected/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-kube-api-access-ktpjn\") pod \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\" (UID: \"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a\") " Dec 03 20:01:25 crc kubenswrapper[4916]: I1203 20:01:25.567314 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-kube-api-access-ktpjn" (OuterVolumeSpecName: "kube-api-access-ktpjn") pod "d64868ee-2aa9-48b3-bfd7-895a9daf8c5a" (UID: "d64868ee-2aa9-48b3-bfd7-895a9daf8c5a"). InnerVolumeSpecName "kube-api-access-ktpjn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:01:25 crc kubenswrapper[4916]: I1203 20:01:25.594138 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-inventory" (OuterVolumeSpecName: "inventory") pod "d64868ee-2aa9-48b3-bfd7-895a9daf8c5a" (UID: "d64868ee-2aa9-48b3-bfd7-895a9daf8c5a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:01:25 crc kubenswrapper[4916]: I1203 20:01:25.610890 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d64868ee-2aa9-48b3-bfd7-895a9daf8c5a" (UID: "d64868ee-2aa9-48b3-bfd7-895a9daf8c5a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:01:25 crc kubenswrapper[4916]: I1203 20:01:25.657203 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktpjn\" (UniqueName: \"kubernetes.io/projected/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-kube-api-access-ktpjn\") on node \"crc\" DevicePath \"\"" Dec 03 20:01:25 crc kubenswrapper[4916]: I1203 20:01:25.657242 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 20:01:25 crc kubenswrapper[4916]: I1203 20:01:25.657255 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d64868ee-2aa9-48b3-bfd7-895a9daf8c5a-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.056469 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" event={"ID":"d64868ee-2aa9-48b3-bfd7-895a9daf8c5a","Type":"ContainerDied","Data":"a4e44324ab44b7d05a40ea5d59a3ba16d07096ddfb8153f83935fca9e56b646e"} Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.056551 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4e44324ab44b7d05a40ea5d59a3ba16d07096ddfb8153f83935fca9e56b646e" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.057171 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.070170 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-9vjsv"] Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.087595 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-9vjsv"] Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.154325 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9"] Dec 03 20:01:26 crc kubenswrapper[4916]: E1203 20:01:26.155072 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d64868ee-2aa9-48b3-bfd7-895a9daf8c5a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.155112 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d64868ee-2aa9-48b3-bfd7-895a9daf8c5a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.155478 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="d64868ee-2aa9-48b3-bfd7-895a9daf8c5a" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.157277 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.162591 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.163784 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.163989 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.166512 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.186050 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9"] Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.276330 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klpx7\" (UniqueName: \"kubernetes.io/projected/6b66d006-a019-4921-9663-8fc348caf782-kube-api-access-klpx7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-88vj9\" (UID: \"6b66d006-a019-4921-9663-8fc348caf782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.276759 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-88vj9\" (UID: \"6b66d006-a019-4921-9663-8fc348caf782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.276818 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-88vj9\" (UID: \"6b66d006-a019-4921-9663-8fc348caf782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.378940 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klpx7\" (UniqueName: \"kubernetes.io/projected/6b66d006-a019-4921-9663-8fc348caf782-kube-api-access-klpx7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-88vj9\" (UID: \"6b66d006-a019-4921-9663-8fc348caf782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.379189 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-88vj9\" (UID: \"6b66d006-a019-4921-9663-8fc348caf782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.379233 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-88vj9\" (UID: 
\"6b66d006-a019-4921-9663-8fc348caf782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.387806 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-88vj9\" (UID: \"6b66d006-a019-4921-9663-8fc348caf782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.387984 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-88vj9\" (UID: \"6b66d006-a019-4921-9663-8fc348caf782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.403402 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klpx7\" (UniqueName: \"kubernetes.io/projected/6b66d006-a019-4921-9663-8fc348caf782-kube-api-access-klpx7\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-88vj9\" (UID: \"6b66d006-a019-4921-9663-8fc348caf782\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.477099 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:01:26 crc kubenswrapper[4916]: I1203 20:01:26.505514 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34e53902-e2e3-4757-b7ad-b9ff5431bd8c" path="/var/lib/kubelet/pods/34e53902-e2e3-4757-b7ad-b9ff5431bd8c/volumes" Dec 03 20:01:27 crc kubenswrapper[4916]: I1203 20:01:27.035002 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-vrzk5"] Dec 03 20:01:27 crc kubenswrapper[4916]: I1203 20:01:27.047518 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-vrzk5"] Dec 03 20:01:27 crc kubenswrapper[4916]: W1203 20:01:27.070881 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b66d006_a019_4921_9663_8fc348caf782.slice/crio-48388fa8bd27d9b3725e975db1b29d7d0c26b7e239c02edcc7c1bfd9bc00b9f0 WatchSource:0}: Error finding container 48388fa8bd27d9b3725e975db1b29d7d0c26b7e239c02edcc7c1bfd9bc00b9f0: Status 404 returned error can't find the container with id 48388fa8bd27d9b3725e975db1b29d7d0c26b7e239c02edcc7c1bfd9bc00b9f0 Dec 03 20:01:27 crc kubenswrapper[4916]: I1203 20:01:27.072343 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9"] Dec 03 20:01:28 crc kubenswrapper[4916]: I1203 20:01:28.079918 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" event={"ID":"6b66d006-a019-4921-9663-8fc348caf782","Type":"ContainerStarted","Data":"30058b58316b28b01c2bb959d62ea3b34ce3d236094d39834afb313f205e414a"} Dec 03 20:01:28 crc kubenswrapper[4916]: I1203 20:01:28.079963 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" 
event={"ID":"6b66d006-a019-4921-9663-8fc348caf782","Type":"ContainerStarted","Data":"48388fa8bd27d9b3725e975db1b29d7d0c26b7e239c02edcc7c1bfd9bc00b9f0"} Dec 03 20:01:28 crc kubenswrapper[4916]: I1203 20:01:28.102909 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" podStartSLOduration=1.685984082 podStartE2EDuration="2.102893615s" podCreationTimestamp="2025-12-03 20:01:26 +0000 UTC" firstStartedPulling="2025-12-03 20:01:27.073312263 +0000 UTC m=+1903.036122529" lastFinishedPulling="2025-12-03 20:01:27.490221786 +0000 UTC m=+1903.453032062" observedRunningTime="2025-12-03 20:01:28.097630894 +0000 UTC m=+1904.060441170" watchObservedRunningTime="2025-12-03 20:01:28.102893615 +0000 UTC m=+1904.065703881" Dec 03 20:01:28 crc kubenswrapper[4916]: I1203 20:01:28.498016 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46082ae5-9ed1-46c5-8320-d7477415de04" path="/var/lib/kubelet/pods/46082ae5-9ed1-46c5-8320-d7477415de04/volumes" Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.561134 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6gsts"] Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.567692 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.576483 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6gsts"] Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.657089 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-catalog-content\") pod \"community-operators-6gsts\" (UID: \"cb57f4e7-c89d-4576-840a-d918d27a179a\") " pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.657247 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-utilities\") pod \"community-operators-6gsts\" (UID: \"cb57f4e7-c89d-4576-840a-d918d27a179a\") " pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.657434 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfmhg\" (UniqueName: \"kubernetes.io/projected/cb57f4e7-c89d-4576-840a-d918d27a179a-kube-api-access-tfmhg\") pod \"community-operators-6gsts\" (UID: \"cb57f4e7-c89d-4576-840a-d918d27a179a\") " pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.759234 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-utilities\") pod \"community-operators-6gsts\" (UID: \"cb57f4e7-c89d-4576-840a-d918d27a179a\") " pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.759394 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfmhg\" (UniqueName: \"kubernetes.io/projected/cb57f4e7-c89d-4576-840a-d918d27a179a-kube-api-access-tfmhg\") pod \"community-operators-6gsts\" (UID: 
\"cb57f4e7-c89d-4576-840a-d918d27a179a\") " pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.759432 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-catalog-content\") pod \"community-operators-6gsts\" (UID: \"cb57f4e7-c89d-4576-840a-d918d27a179a\") " pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.759909 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-utilities\") pod \"community-operators-6gsts\" (UID: \"cb57f4e7-c89d-4576-840a-d918d27a179a\") " pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.759963 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-catalog-content\") pod \"community-operators-6gsts\" (UID: \"cb57f4e7-c89d-4576-840a-d918d27a179a\") " pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.783408 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfmhg\" (UniqueName: \"kubernetes.io/projected/cb57f4e7-c89d-4576-840a-d918d27a179a-kube-api-access-tfmhg\") pod \"community-operators-6gsts\" (UID: \"cb57f4e7-c89d-4576-840a-d918d27a179a\") " pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:01:52 crc kubenswrapper[4916]: I1203 20:01:52.904447 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:01:53 crc kubenswrapper[4916]: I1203 20:01:53.029528 4916 scope.go:117] "RemoveContainer" containerID="f8c194e4ca14af6ad3afab72bc55bbf94a95a6bf2536ce315003c0e2ece66735" Dec 03 20:01:53 crc kubenswrapper[4916]: I1203 20:01:53.170796 4916 scope.go:117] "RemoveContainer" containerID="b543e5ffd3f1a72659a5afbc25a8916c3f6113dc87c22096da4b20b1b42d943f" Dec 03 20:01:53 crc kubenswrapper[4916]: I1203 20:01:53.231406 4916 scope.go:117] "RemoveContainer" containerID="89be2dc057219ac07098b5d3860cdbe776077e2ab2a6120e7572e388f99fbe80" Dec 03 20:01:53 crc kubenswrapper[4916]: I1203 20:01:53.472297 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6gsts"] Dec 03 20:01:54 crc kubenswrapper[4916]: I1203 20:01:54.356349 4916 generic.go:334] "Generic (PLEG): container finished" podID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerID="20cb19cc4a6c5733ef06418b0a3e8bb1e231b7664d0e73bea3949fc46cc908d6" exitCode=0 Dec 03 20:01:54 crc kubenswrapper[4916]: I1203 20:01:54.356619 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gsts" event={"ID":"cb57f4e7-c89d-4576-840a-d918d27a179a","Type":"ContainerDied","Data":"20cb19cc4a6c5733ef06418b0a3e8bb1e231b7664d0e73bea3949fc46cc908d6"} Dec 03 20:01:54 crc kubenswrapper[4916]: I1203 20:01:54.356865 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gsts" event={"ID":"cb57f4e7-c89d-4576-840a-d918d27a179a","Type":"ContainerStarted","Data":"61e9ba1df19cd51860235c57b281a82dee1d2e1cb128260d46bd646e72f2d33d"} Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.367621 4916 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gsts" event={"ID":"cb57f4e7-c89d-4576-840a-d918d27a179a","Type":"ContainerStarted","Data":"124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1"} Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.752022 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hj4q6"] Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.754482 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.766803 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hj4q6"] Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.817038 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7qdc\" (UniqueName: \"kubernetes.io/projected/860619d1-e5c1-49a4-886b-55f8e054c3b4-kube-api-access-p7qdc\") pod \"redhat-operators-hj4q6\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.817090 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-catalog-content\") pod \"redhat-operators-hj4q6\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.817146 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-utilities\") pod \"redhat-operators-hj4q6\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.919603 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7qdc\" (UniqueName: \"kubernetes.io/projected/860619d1-e5c1-49a4-886b-55f8e054c3b4-kube-api-access-p7qdc\") pod \"redhat-operators-hj4q6\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.919653 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-catalog-content\") pod \"redhat-operators-hj4q6\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.919692 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-utilities\") pod \"redhat-operators-hj4q6\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.920084 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-catalog-content\") pod \"redhat-operators-hj4q6\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " pod="openshift-marketplace/redhat-operators-hj4q6" 
Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.920152 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-utilities\") pod \"redhat-operators-hj4q6\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:01:55 crc kubenswrapper[4916]: I1203 20:01:55.944622 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7qdc\" (UniqueName: \"kubernetes.io/projected/860619d1-e5c1-49a4-886b-55f8e054c3b4-kube-api-access-p7qdc\") pod \"redhat-operators-hj4q6\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:01:56 crc kubenswrapper[4916]: I1203 20:01:56.075543 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:01:56 crc kubenswrapper[4916]: I1203 20:01:56.380929 4916 generic.go:334] "Generic (PLEG): container finished" podID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerID="124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1" exitCode=0 Dec 03 20:01:56 crc kubenswrapper[4916]: I1203 20:01:56.380991 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gsts" event={"ID":"cb57f4e7-c89d-4576-840a-d918d27a179a","Type":"ContainerDied","Data":"124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1"} Dec 03 20:01:56 crc kubenswrapper[4916]: I1203 20:01:56.540930 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hj4q6"] Dec 03 20:01:56 crc kubenswrapper[4916]: W1203 20:01:56.543606 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod860619d1_e5c1_49a4_886b_55f8e054c3b4.slice/crio-36a349f50004f3f7966e56f2266601208ed0757c3e6b5257c3e3f85b29774499 WatchSource:0}: Error finding container 36a349f50004f3f7966e56f2266601208ed0757c3e6b5257c3e3f85b29774499: Status 404 returned error can't find the container with id 36a349f50004f3f7966e56f2266601208ed0757c3e6b5257c3e3f85b29774499 Dec 03 20:01:57 crc kubenswrapper[4916]: I1203 20:01:57.392770 4916 generic.go:334] "Generic (PLEG): container finished" podID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerID="423d1ac0253efd4b2b958549782956a148a486ed24819e2284eae82c15fcd0ed" exitCode=0 Dec 03 20:01:57 crc kubenswrapper[4916]: I1203 20:01:57.392887 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hj4q6" event={"ID":"860619d1-e5c1-49a4-886b-55f8e054c3b4","Type":"ContainerDied","Data":"423d1ac0253efd4b2b958549782956a148a486ed24819e2284eae82c15fcd0ed"} Dec 03 20:01:57 crc kubenswrapper[4916]: I1203 20:01:57.393334 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hj4q6" event={"ID":"860619d1-e5c1-49a4-886b-55f8e054c3b4","Type":"ContainerStarted","Data":"36a349f50004f3f7966e56f2266601208ed0757c3e6b5257c3e3f85b29774499"} Dec 03 20:01:57 crc kubenswrapper[4916]: I1203 20:01:57.396508 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gsts" event={"ID":"cb57f4e7-c89d-4576-840a-d918d27a179a","Type":"ContainerStarted","Data":"1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef"} Dec 03 20:01:57 crc kubenswrapper[4916]: I1203 20:01:57.439177 4916 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6gsts" podStartSLOduration=2.993444524 podStartE2EDuration="5.439152686s" podCreationTimestamp="2025-12-03 20:01:52 +0000 UTC" firstStartedPulling="2025-12-03 20:01:54.358594318 +0000 UTC m=+1930.321404594" lastFinishedPulling="2025-12-03 20:01:56.8043025 +0000 UTC m=+1932.767112756" observedRunningTime="2025-12-03 20:01:57.434053839 +0000 UTC m=+1933.396864195" watchObservedRunningTime="2025-12-03 20:01:57.439152686 +0000 UTC m=+1933.401962972" Dec 03 20:01:58 crc kubenswrapper[4916]: I1203 20:01:58.414644 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hj4q6" event={"ID":"860619d1-e5c1-49a4-886b-55f8e054c3b4","Type":"ContainerStarted","Data":"fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d"} Dec 03 20:02:01 crc kubenswrapper[4916]: I1203 20:02:01.458199 4916 generic.go:334] "Generic (PLEG): container finished" podID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerID="fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d" exitCode=0 Dec 03 20:02:01 crc kubenswrapper[4916]: I1203 20:02:01.458363 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hj4q6" event={"ID":"860619d1-e5c1-49a4-886b-55f8e054c3b4","Type":"ContainerDied","Data":"fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d"} Dec 03 20:02:02 crc kubenswrapper[4916]: I1203 20:02:02.905311 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:02:02 crc kubenswrapper[4916]: I1203 20:02:02.905810 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:02:03 crc kubenswrapper[4916]: I1203 20:02:03.479318 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hj4q6" event={"ID":"860619d1-e5c1-49a4-886b-55f8e054c3b4","Type":"ContainerStarted","Data":"44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735"} Dec 03 20:02:03 crc kubenswrapper[4916]: I1203 20:02:03.503679 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hj4q6" podStartSLOduration=3.232558416 podStartE2EDuration="8.503657684s" podCreationTimestamp="2025-12-03 20:01:55 +0000 UTC" firstStartedPulling="2025-12-03 20:01:57.394796403 +0000 UTC m=+1933.357606679" lastFinishedPulling="2025-12-03 20:02:02.665895671 +0000 UTC m=+1938.628705947" observedRunningTime="2025-12-03 20:02:03.500947491 +0000 UTC m=+1939.463757767" watchObservedRunningTime="2025-12-03 20:02:03.503657684 +0000 UTC m=+1939.466467960" Dec 03 20:02:03 crc kubenswrapper[4916]: I1203 20:02:03.975955 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-6gsts" podUID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerName="registry-server" probeResult="failure" output=< Dec 03 20:02:03 crc kubenswrapper[4916]: timeout: failed to connect service ":50051" within 1s Dec 03 20:02:03 crc kubenswrapper[4916]: > Dec 03 20:02:06 crc kubenswrapper[4916]: I1203 20:02:06.076496 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:02:06 crc kubenswrapper[4916]: I1203 20:02:06.076860 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:02:07 crc kubenswrapper[4916]: I1203 20:02:07.138669 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hj4q6" podUID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerName="registry-server" probeResult="failure" output=< Dec 03 20:02:07 crc kubenswrapper[4916]: timeout: failed to connect service ":50051" within 1s Dec 03 20:02:07 crc kubenswrapper[4916]: > Dec 03 20:02:12 crc kubenswrapper[4916]: I1203 20:02:12.059463 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-5t7sm"] Dec 03 20:02:12 crc kubenswrapper[4916]: I1203 20:02:12.067330 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-5t7sm"] Dec 03 20:02:12 crc kubenswrapper[4916]: I1203 20:02:12.504183 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80531945-5ede-449d-9903-5fe49857e211" path="/var/lib/kubelet/pods/80531945-5ede-449d-9903-5fe49857e211/volumes" Dec 03 20:02:12 crc kubenswrapper[4916]: I1203 20:02:12.607611 4916 generic.go:334] "Generic (PLEG): container finished" podID="6b66d006-a019-4921-9663-8fc348caf782" containerID="30058b58316b28b01c2bb959d62ea3b34ce3d236094d39834afb313f205e414a" exitCode=0 Dec 03 20:02:12 crc kubenswrapper[4916]: I1203 20:02:12.607714 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" event={"ID":"6b66d006-a019-4921-9663-8fc348caf782","Type":"ContainerDied","Data":"30058b58316b28b01c2bb959d62ea3b34ce3d236094d39834afb313f205e414a"} Dec 03 20:02:12 crc kubenswrapper[4916]: I1203 20:02:12.971281 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:02:13 crc kubenswrapper[4916]: I1203 20:02:13.040187 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:02:13 crc kubenswrapper[4916]: I1203 20:02:13.342033 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6gsts"] Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.139181 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.225685 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klpx7\" (UniqueName: \"kubernetes.io/projected/6b66d006-a019-4921-9663-8fc348caf782-kube-api-access-klpx7\") pod \"6b66d006-a019-4921-9663-8fc348caf782\" (UID: \"6b66d006-a019-4921-9663-8fc348caf782\") " Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.225852 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-inventory\") pod \"6b66d006-a019-4921-9663-8fc348caf782\" (UID: \"6b66d006-a019-4921-9663-8fc348caf782\") " Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.225901 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-ssh-key\") pod \"6b66d006-a019-4921-9663-8fc348caf782\" (UID: \"6b66d006-a019-4921-9663-8fc348caf782\") " Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.232251 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b66d006-a019-4921-9663-8fc348caf782-kube-api-access-klpx7" (OuterVolumeSpecName: "kube-api-access-klpx7") pod "6b66d006-a019-4921-9663-8fc348caf782" (UID: "6b66d006-a019-4921-9663-8fc348caf782"). InnerVolumeSpecName "kube-api-access-klpx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.256689 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-inventory" (OuterVolumeSpecName: "inventory") pod "6b66d006-a019-4921-9663-8fc348caf782" (UID: "6b66d006-a019-4921-9663-8fc348caf782"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.262538 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6b66d006-a019-4921-9663-8fc348caf782" (UID: "6b66d006-a019-4921-9663-8fc348caf782"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.328443 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klpx7\" (UniqueName: \"kubernetes.io/projected/6b66d006-a019-4921-9663-8fc348caf782-kube-api-access-klpx7\") on node \"crc\" DevicePath \"\"" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.328490 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.328503 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6b66d006-a019-4921-9663-8fc348caf782-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.626364 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6gsts" podUID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerName="registry-server" containerID="cri-o://1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef" gracePeriod=2 Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.626753 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.626809 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-88vj9" event={"ID":"6b66d006-a019-4921-9663-8fc348caf782","Type":"ContainerDied","Data":"48388fa8bd27d9b3725e975db1b29d7d0c26b7e239c02edcc7c1bfd9bc00b9f0"} Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.626854 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48388fa8bd27d9b3725e975db1b29d7d0c26b7e239c02edcc7c1bfd9bc00b9f0" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.727793 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz"] Dec 03 20:02:14 crc kubenswrapper[4916]: E1203 20:02:14.728155 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b66d006-a019-4921-9663-8fc348caf782" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.728172 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b66d006-a019-4921-9663-8fc348caf782" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.728372 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b66d006-a019-4921-9663-8fc348caf782" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.728992 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.731464 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.731702 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.740064 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.740129 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.743108 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz"] Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.842253 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6544j\" (UniqueName: \"kubernetes.io/projected/61740a0d-2157-431d-a999-802aad6cb402-kube-api-access-6544j\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz\" (UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.842330 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz\" (UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.842517 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz\" (UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.945938 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6544j\" (UniqueName: \"kubernetes.io/projected/61740a0d-2157-431d-a999-802aad6cb402-kube-api-access-6544j\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz\" (UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.946156 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz\" (UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.946191 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz\" 
(UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.952379 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz\" (UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.956217 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz\" (UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:02:14 crc kubenswrapper[4916]: I1203 20:02:14.963442 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6544j\" (UniqueName: \"kubernetes.io/projected/61740a0d-2157-431d-a999-802aad6cb402-kube-api-access-6544j\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz\" (UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.097304 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.198724 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.251012 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-utilities\") pod \"cb57f4e7-c89d-4576-840a-d918d27a179a\" (UID: \"cb57f4e7-c89d-4576-840a-d918d27a179a\") " Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.251163 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-catalog-content\") pod \"cb57f4e7-c89d-4576-840a-d918d27a179a\" (UID: \"cb57f4e7-c89d-4576-840a-d918d27a179a\") " Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.251332 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfmhg\" (UniqueName: \"kubernetes.io/projected/cb57f4e7-c89d-4576-840a-d918d27a179a-kube-api-access-tfmhg\") pod \"cb57f4e7-c89d-4576-840a-d918d27a179a\" (UID: \"cb57f4e7-c89d-4576-840a-d918d27a179a\") " Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.255807 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-utilities" (OuterVolumeSpecName: "utilities") pod "cb57f4e7-c89d-4576-840a-d918d27a179a" (UID: "cb57f4e7-c89d-4576-840a-d918d27a179a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.262792 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb57f4e7-c89d-4576-840a-d918d27a179a-kube-api-access-tfmhg" (OuterVolumeSpecName: "kube-api-access-tfmhg") pod "cb57f4e7-c89d-4576-840a-d918d27a179a" (UID: "cb57f4e7-c89d-4576-840a-d918d27a179a"). InnerVolumeSpecName "kube-api-access-tfmhg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.315891 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb57f4e7-c89d-4576-840a-d918d27a179a" (UID: "cb57f4e7-c89d-4576-840a-d918d27a179a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.355414 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfmhg\" (UniqueName: \"kubernetes.io/projected/cb57f4e7-c89d-4576-840a-d918d27a179a-kube-api-access-tfmhg\") on node \"crc\" DevicePath \"\"" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.355455 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.355473 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb57f4e7-c89d-4576-840a-d918d27a179a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.644847 4916 generic.go:334] "Generic (PLEG): container finished" podID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerID="1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef" exitCode=0 Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.644894 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gsts" event={"ID":"cb57f4e7-c89d-4576-840a-d918d27a179a","Type":"ContainerDied","Data":"1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef"} Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.644907 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6gsts" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.644926 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6gsts" event={"ID":"cb57f4e7-c89d-4576-840a-d918d27a179a","Type":"ContainerDied","Data":"61e9ba1df19cd51860235c57b281a82dee1d2e1cb128260d46bd646e72f2d33d"} Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.644959 4916 scope.go:117] "RemoveContainer" containerID="1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.669275 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz"] Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.686356 4916 scope.go:117] "RemoveContainer" containerID="124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.694784 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6gsts"] Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.721917 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6gsts"] Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.750799 4916 scope.go:117] "RemoveContainer" containerID="20cb19cc4a6c5733ef06418b0a3e8bb1e231b7664d0e73bea3949fc46cc908d6" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.778383 4916 scope.go:117] "RemoveContainer" containerID="1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef" Dec 03 20:02:15 crc kubenswrapper[4916]: E1203 20:02:15.778994 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef\": container with ID starting with 1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef not found: ID does not exist" containerID="1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.779029 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef"} err="failed to get container status \"1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef\": rpc error: code = NotFound desc = could not find container \"1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef\": container with ID starting with 1e8d4f098fb1753bd8f6e18b52e99385290a0a6a83bd5e35db1df5c0eb44b7ef not found: ID does not exist" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.779049 4916 scope.go:117] "RemoveContainer" containerID="124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1" Dec 03 20:02:15 crc kubenswrapper[4916]: E1203 20:02:15.779269 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1\": container with ID starting with 124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1 not found: ID does not exist" containerID="124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.779291 4916 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1"} err="failed to get container status \"124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1\": rpc error: code = NotFound desc = could not find container \"124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1\": container with ID starting with 124478a4bf290003198b2795ef1346b27684cfc8ea58a279aec39782090109e1 not found: ID does not exist" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.779304 4916 scope.go:117] "RemoveContainer" containerID="20cb19cc4a6c5733ef06418b0a3e8bb1e231b7664d0e73bea3949fc46cc908d6" Dec 03 20:02:15 crc kubenswrapper[4916]: E1203 20:02:15.779607 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20cb19cc4a6c5733ef06418b0a3e8bb1e231b7664d0e73bea3949fc46cc908d6\": container with ID starting with 20cb19cc4a6c5733ef06418b0a3e8bb1e231b7664d0e73bea3949fc46cc908d6 not found: ID does not exist" containerID="20cb19cc4a6c5733ef06418b0a3e8bb1e231b7664d0e73bea3949fc46cc908d6" Dec 03 20:02:15 crc kubenswrapper[4916]: I1203 20:02:15.779628 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20cb19cc4a6c5733ef06418b0a3e8bb1e231b7664d0e73bea3949fc46cc908d6"} err="failed to get container status \"20cb19cc4a6c5733ef06418b0a3e8bb1e231b7664d0e73bea3949fc46cc908d6\": rpc error: code = NotFound desc = could not find container \"20cb19cc4a6c5733ef06418b0a3e8bb1e231b7664d0e73bea3949fc46cc908d6\": container with ID starting with 20cb19cc4a6c5733ef06418b0a3e8bb1e231b7664d0e73bea3949fc46cc908d6 not found: ID does not exist" Dec 03 20:02:16 crc kubenswrapper[4916]: I1203 20:02:16.141338 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:02:16 crc kubenswrapper[4916]: I1203 20:02:16.159859 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:02:16 crc kubenswrapper[4916]: I1203 20:02:16.159922 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:02:16 crc kubenswrapper[4916]: I1203 20:02:16.214630 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:02:16 crc kubenswrapper[4916]: I1203 20:02:16.487210 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb57f4e7-c89d-4576-840a-d918d27a179a" path="/var/lib/kubelet/pods/cb57f4e7-c89d-4576-840a-d918d27a179a/volumes" Dec 03 20:02:16 crc kubenswrapper[4916]: I1203 20:02:16.653523 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" event={"ID":"61740a0d-2157-431d-a999-802aad6cb402","Type":"ContainerStarted","Data":"7b77fe304067321fbc2f1ead8dc5b51c0d3020be25910eac8ae5d173ea3ed619"} Dec 03 20:02:16 crc kubenswrapper[4916]: I1203 20:02:16.653596 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" event={"ID":"61740a0d-2157-431d-a999-802aad6cb402","Type":"ContainerStarted","Data":"9ce59d1c662bd1f1cdbe3fbfe36d73b7f473ba0546637d4bc0e4b3006e9a83ac"} Dec 03 20:02:17 crc kubenswrapper[4916]: I1203 20:02:17.733335 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" podStartSLOduration=3.288737614 podStartE2EDuration="3.733320032s" podCreationTimestamp="2025-12-03 20:02:14 +0000 UTC" firstStartedPulling="2025-12-03 20:02:15.686345704 +0000 UTC m=+1951.649155970" lastFinishedPulling="2025-12-03 20:02:16.130928122 +0000 UTC m=+1952.093738388" observedRunningTime="2025-12-03 20:02:16.677086393 +0000 UTC m=+1952.639896659" watchObservedRunningTime="2025-12-03 20:02:17.733320032 +0000 UTC m=+1953.696130298" Dec 03 20:02:17 crc kubenswrapper[4916]: I1203 20:02:17.736387 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hj4q6"] Dec 03 20:02:17 crc kubenswrapper[4916]: I1203 20:02:17.737180 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hj4q6" podUID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerName="registry-server" containerID="cri-o://44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735" gracePeriod=2 Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.357303 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.449125 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-utilities\") pod \"860619d1-e5c1-49a4-886b-55f8e054c3b4\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.449181 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7qdc\" (UniqueName: \"kubernetes.io/projected/860619d1-e5c1-49a4-886b-55f8e054c3b4-kube-api-access-p7qdc\") pod \"860619d1-e5c1-49a4-886b-55f8e054c3b4\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.449376 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-catalog-content\") pod \"860619d1-e5c1-49a4-886b-55f8e054c3b4\" (UID: \"860619d1-e5c1-49a4-886b-55f8e054c3b4\") " Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.450243 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-utilities" (OuterVolumeSpecName: "utilities") pod "860619d1-e5c1-49a4-886b-55f8e054c3b4" (UID: "860619d1-e5c1-49a4-886b-55f8e054c3b4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.456943 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/860619d1-e5c1-49a4-886b-55f8e054c3b4-kube-api-access-p7qdc" (OuterVolumeSpecName: "kube-api-access-p7qdc") pod "860619d1-e5c1-49a4-886b-55f8e054c3b4" (UID: "860619d1-e5c1-49a4-886b-55f8e054c3b4"). InnerVolumeSpecName "kube-api-access-p7qdc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.552200 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.552306 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7qdc\" (UniqueName: \"kubernetes.io/projected/860619d1-e5c1-49a4-886b-55f8e054c3b4-kube-api-access-p7qdc\") on node \"crc\" DevicePath \"\"" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.559718 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "860619d1-e5c1-49a4-886b-55f8e054c3b4" (UID: "860619d1-e5c1-49a4-886b-55f8e054c3b4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.653916 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/860619d1-e5c1-49a4-886b-55f8e054c3b4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.673675 4916 generic.go:334] "Generic (PLEG): container finished" podID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerID="44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735" exitCode=0 Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.673864 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hj4q6" event={"ID":"860619d1-e5c1-49a4-886b-55f8e054c3b4","Type":"ContainerDied","Data":"44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735"} Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.673961 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hj4q6" event={"ID":"860619d1-e5c1-49a4-886b-55f8e054c3b4","Type":"ContainerDied","Data":"36a349f50004f3f7966e56f2266601208ed0757c3e6b5257c3e3f85b29774499"} Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.673880 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hj4q6" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.673999 4916 scope.go:117] "RemoveContainer" containerID="44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.705260 4916 scope.go:117] "RemoveContainer" containerID="fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.729042 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hj4q6"] Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.736995 4916 scope.go:117] "RemoveContainer" containerID="423d1ac0253efd4b2b958549782956a148a486ed24819e2284eae82c15fcd0ed" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.743339 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hj4q6"] Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.789497 4916 scope.go:117] "RemoveContainer" containerID="44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735" Dec 03 20:02:18 crc kubenswrapper[4916]: E1203 20:02:18.790061 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735\": container with ID starting with 44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735 not found: ID does not exist" containerID="44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.790138 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735"} err="failed to get container status \"44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735\": rpc error: code = NotFound desc = could not find container \"44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735\": container with ID starting with 44ee4042196ae68b7ece3c05d229db5a02369afa91747b724e9d68bfabecd735 not found: ID does not exist" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.790180 4916 scope.go:117] "RemoveContainer" containerID="fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d" Dec 03 20:02:18 crc kubenswrapper[4916]: E1203 20:02:18.790515 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d\": container with ID starting with fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d not found: ID does not exist" containerID="fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.790561 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d"} err="failed to get container status \"fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d\": rpc error: code = NotFound desc = could not find container \"fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d\": container with ID starting with fa6a0b6d92c87ad2fa52d0dbed0f67b018be6be2b0934d0d68186e1c9304756d not found: ID does not exist" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.790608 4916 scope.go:117] "RemoveContainer" 
containerID="423d1ac0253efd4b2b958549782956a148a486ed24819e2284eae82c15fcd0ed" Dec 03 20:02:18 crc kubenswrapper[4916]: E1203 20:02:18.790954 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"423d1ac0253efd4b2b958549782956a148a486ed24819e2284eae82c15fcd0ed\": container with ID starting with 423d1ac0253efd4b2b958549782956a148a486ed24819e2284eae82c15fcd0ed not found: ID does not exist" containerID="423d1ac0253efd4b2b958549782956a148a486ed24819e2284eae82c15fcd0ed" Dec 03 20:02:18 crc kubenswrapper[4916]: I1203 20:02:18.791006 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"423d1ac0253efd4b2b958549782956a148a486ed24819e2284eae82c15fcd0ed"} err="failed to get container status \"423d1ac0253efd4b2b958549782956a148a486ed24819e2284eae82c15fcd0ed\": rpc error: code = NotFound desc = could not find container \"423d1ac0253efd4b2b958549782956a148a486ed24819e2284eae82c15fcd0ed\": container with ID starting with 423d1ac0253efd4b2b958549782956a148a486ed24819e2284eae82c15fcd0ed not found: ID does not exist" Dec 03 20:02:20 crc kubenswrapper[4916]: I1203 20:02:20.494717 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="860619d1-e5c1-49a4-886b-55f8e054c3b4" path="/var/lib/kubelet/pods/860619d1-e5c1-49a4-886b-55f8e054c3b4/volumes" Dec 03 20:02:46 crc kubenswrapper[4916]: I1203 20:02:46.159532 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:02:46 crc kubenswrapper[4916]: I1203 20:02:46.160255 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:02:53 crc kubenswrapper[4916]: I1203 20:02:53.327453 4916 scope.go:117] "RemoveContainer" containerID="e10f7549a490272f9fc7f8c7d38d07e96c3a775215e6ea8ef3cb304b4f5cb099" Dec 03 20:03:16 crc kubenswrapper[4916]: I1203 20:03:16.159386 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:03:16 crc kubenswrapper[4916]: I1203 20:03:16.160265 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:03:16 crc kubenswrapper[4916]: I1203 20:03:16.160360 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 20:03:16 crc kubenswrapper[4916]: I1203 20:03:16.161781 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e13c1fbfb649ac0f9340740b2db42f1899368837b532c074802cbbdf37483fd4"} 
pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 20:03:16 crc kubenswrapper[4916]: I1203 20:03:16.161920 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://e13c1fbfb649ac0f9340740b2db42f1899368837b532c074802cbbdf37483fd4" gracePeriod=600 Dec 03 20:03:16 crc kubenswrapper[4916]: I1203 20:03:16.314486 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="e13c1fbfb649ac0f9340740b2db42f1899368837b532c074802cbbdf37483fd4" exitCode=0 Dec 03 20:03:16 crc kubenswrapper[4916]: I1203 20:03:16.314522 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"e13c1fbfb649ac0f9340740b2db42f1899368837b532c074802cbbdf37483fd4"} Dec 03 20:03:16 crc kubenswrapper[4916]: I1203 20:03:16.314557 4916 scope.go:117] "RemoveContainer" containerID="1cfe11597331b5a6642872ae4566ca8ad37359896bb4e39b00a4e14690d5ef72" Dec 03 20:03:17 crc kubenswrapper[4916]: I1203 20:03:17.325039 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab"} Dec 03 20:03:18 crc kubenswrapper[4916]: I1203 20:03:18.337922 4916 generic.go:334] "Generic (PLEG): container finished" podID="61740a0d-2157-431d-a999-802aad6cb402" containerID="7b77fe304067321fbc2f1ead8dc5b51c0d3020be25910eac8ae5d173ea3ed619" exitCode=0 Dec 03 20:03:18 crc kubenswrapper[4916]: I1203 20:03:18.338037 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" event={"ID":"61740a0d-2157-431d-a999-802aad6cb402","Type":"ContainerDied","Data":"7b77fe304067321fbc2f1ead8dc5b51c0d3020be25910eac8ae5d173ea3ed619"} Dec 03 20:03:19 crc kubenswrapper[4916]: I1203 20:03:19.801683 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:03:19 crc kubenswrapper[4916]: I1203 20:03:19.876249 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-ssh-key\") pod \"61740a0d-2157-431d-a999-802aad6cb402\" (UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " Dec 03 20:03:19 crc kubenswrapper[4916]: I1203 20:03:19.876666 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-inventory\") pod \"61740a0d-2157-431d-a999-802aad6cb402\" (UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " Dec 03 20:03:19 crc kubenswrapper[4916]: I1203 20:03:19.876727 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6544j\" (UniqueName: \"kubernetes.io/projected/61740a0d-2157-431d-a999-802aad6cb402-kube-api-access-6544j\") pod \"61740a0d-2157-431d-a999-802aad6cb402\" (UID: \"61740a0d-2157-431d-a999-802aad6cb402\") " Dec 03 20:03:19 crc kubenswrapper[4916]: I1203 20:03:19.881323 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61740a0d-2157-431d-a999-802aad6cb402-kube-api-access-6544j" (OuterVolumeSpecName: "kube-api-access-6544j") pod "61740a0d-2157-431d-a999-802aad6cb402" (UID: "61740a0d-2157-431d-a999-802aad6cb402"). InnerVolumeSpecName "kube-api-access-6544j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:03:19 crc kubenswrapper[4916]: I1203 20:03:19.900920 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61740a0d-2157-431d-a999-802aad6cb402" (UID: "61740a0d-2157-431d-a999-802aad6cb402"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:03:19 crc kubenswrapper[4916]: I1203 20:03:19.911838 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-inventory" (OuterVolumeSpecName: "inventory") pod "61740a0d-2157-431d-a999-802aad6cb402" (UID: "61740a0d-2157-431d-a999-802aad6cb402"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:03:19 crc kubenswrapper[4916]: I1203 20:03:19.979774 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 20:03:19 crc kubenswrapper[4916]: I1203 20:03:19.980064 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61740a0d-2157-431d-a999-802aad6cb402-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 20:03:19 crc kubenswrapper[4916]: I1203 20:03:19.980190 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6544j\" (UniqueName: \"kubernetes.io/projected/61740a0d-2157-431d-a999-802aad6cb402-kube-api-access-6544j\") on node \"crc\" DevicePath \"\"" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.359197 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" event={"ID":"61740a0d-2157-431d-a999-802aad6cb402","Type":"ContainerDied","Data":"9ce59d1c662bd1f1cdbe3fbfe36d73b7f473ba0546637d4bc0e4b3006e9a83ac"} Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.359234 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ce59d1c662bd1f1cdbe3fbfe36d73b7f473ba0546637d4bc0e4b3006e9a83ac" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.359298 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.449953 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qcp8f"] Dec 03 20:03:20 crc kubenswrapper[4916]: E1203 20:03:20.450403 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerName="registry-server" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.450428 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerName="registry-server" Dec 03 20:03:20 crc kubenswrapper[4916]: E1203 20:03:20.450448 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerName="extract-utilities" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.450459 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerName="extract-utilities" Dec 03 20:03:20 crc kubenswrapper[4916]: E1203 20:03:20.450481 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61740a0d-2157-431d-a999-802aad6cb402" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.450490 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="61740a0d-2157-431d-a999-802aad6cb402" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:03:20 crc kubenswrapper[4916]: E1203 20:03:20.450508 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerName="extract-utilities" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.450515 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerName="extract-utilities" Dec 03 20:03:20 crc kubenswrapper[4916]: E1203 20:03:20.450539 4916 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerName="extract-content" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.450548 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerName="extract-content" Dec 03 20:03:20 crc kubenswrapper[4916]: E1203 20:03:20.450601 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerName="extract-content" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.450609 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerName="extract-content" Dec 03 20:03:20 crc kubenswrapper[4916]: E1203 20:03:20.450622 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerName="registry-server" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.450627 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerName="registry-server" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.450785 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb57f4e7-c89d-4576-840a-d918d27a179a" containerName="registry-server" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.450811 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="860619d1-e5c1-49a4-886b-55f8e054c3b4" containerName="registry-server" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.450822 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="61740a0d-2157-431d-a999-802aad6cb402" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.451433 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.457199 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.457378 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.457430 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.457555 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.459129 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qcp8f"] Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.591740 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lkr8\" (UniqueName: \"kubernetes.io/projected/d3c34a0f-3914-4307-9e37-317749a61c02-kube-api-access-2lkr8\") pod \"ssh-known-hosts-edpm-deployment-qcp8f\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.591838 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qcp8f\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.591898 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qcp8f\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.694170 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lkr8\" (UniqueName: \"kubernetes.io/projected/d3c34a0f-3914-4307-9e37-317749a61c02-kube-api-access-2lkr8\") pod \"ssh-known-hosts-edpm-deployment-qcp8f\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.694785 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qcp8f\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.695094 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qcp8f\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:20 crc 
kubenswrapper[4916]: I1203 20:03:20.705706 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qcp8f\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.705982 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qcp8f\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.720488 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lkr8\" (UniqueName: \"kubernetes.io/projected/d3c34a0f-3914-4307-9e37-317749a61c02-kube-api-access-2lkr8\") pod \"ssh-known-hosts-edpm-deployment-qcp8f\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:20 crc kubenswrapper[4916]: I1203 20:03:20.782155 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:21 crc kubenswrapper[4916]: I1203 20:03:21.345342 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qcp8f"] Dec 03 20:03:21 crc kubenswrapper[4916]: I1203 20:03:21.351799 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 20:03:21 crc kubenswrapper[4916]: I1203 20:03:21.370469 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" event={"ID":"d3c34a0f-3914-4307-9e37-317749a61c02","Type":"ContainerStarted","Data":"3ca40484ad5860bcb7ebbdd707785b516971aa107705c4cd13dcc0e7f00a035b"} Dec 03 20:03:23 crc kubenswrapper[4916]: I1203 20:03:23.391716 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" event={"ID":"d3c34a0f-3914-4307-9e37-317749a61c02","Type":"ContainerStarted","Data":"02e9c88256811d2ee875b08f484da0c3a150323350f053ff038095247039b9c6"} Dec 03 20:03:23 crc kubenswrapper[4916]: I1203 20:03:23.414700 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" podStartSLOduration=2.376383004 podStartE2EDuration="3.414681181s" podCreationTimestamp="2025-12-03 20:03:20 +0000 UTC" firstStartedPulling="2025-12-03 20:03:21.351108307 +0000 UTC m=+2017.313918613" lastFinishedPulling="2025-12-03 20:03:22.389406514 +0000 UTC m=+2018.352216790" observedRunningTime="2025-12-03 20:03:23.413843758 +0000 UTC m=+2019.376654044" watchObservedRunningTime="2025-12-03 20:03:23.414681181 +0000 UTC m=+2019.377491447" Dec 03 20:03:31 crc kubenswrapper[4916]: I1203 20:03:31.478396 4916 generic.go:334] "Generic (PLEG): container finished" podID="d3c34a0f-3914-4307-9e37-317749a61c02" containerID="02e9c88256811d2ee875b08f484da0c3a150323350f053ff038095247039b9c6" exitCode=0 Dec 03 20:03:31 crc kubenswrapper[4916]: I1203 20:03:31.478502 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" 
event={"ID":"d3c34a0f-3914-4307-9e37-317749a61c02","Type":"ContainerDied","Data":"02e9c88256811d2ee875b08f484da0c3a150323350f053ff038095247039b9c6"} Dec 03 20:03:32 crc kubenswrapper[4916]: I1203 20:03:32.935286 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.080814 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lkr8\" (UniqueName: \"kubernetes.io/projected/d3c34a0f-3914-4307-9e37-317749a61c02-kube-api-access-2lkr8\") pod \"d3c34a0f-3914-4307-9e37-317749a61c02\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.080925 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-inventory-0\") pod \"d3c34a0f-3914-4307-9e37-317749a61c02\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.081110 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-ssh-key-openstack-edpm-ipam\") pod \"d3c34a0f-3914-4307-9e37-317749a61c02\" (UID: \"d3c34a0f-3914-4307-9e37-317749a61c02\") " Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.089682 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3c34a0f-3914-4307-9e37-317749a61c02-kube-api-access-2lkr8" (OuterVolumeSpecName: "kube-api-access-2lkr8") pod "d3c34a0f-3914-4307-9e37-317749a61c02" (UID: "d3c34a0f-3914-4307-9e37-317749a61c02"). InnerVolumeSpecName "kube-api-access-2lkr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.123732 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "d3c34a0f-3914-4307-9e37-317749a61c02" (UID: "d3c34a0f-3914-4307-9e37-317749a61c02"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.152729 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "d3c34a0f-3914-4307-9e37-317749a61c02" (UID: "d3c34a0f-3914-4307-9e37-317749a61c02"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.186945 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lkr8\" (UniqueName: \"kubernetes.io/projected/d3c34a0f-3914-4307-9e37-317749a61c02-kube-api-access-2lkr8\") on node \"crc\" DevicePath \"\"" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.186995 4916 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.187009 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/d3c34a0f-3914-4307-9e37-317749a61c02-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.499488 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" event={"ID":"d3c34a0f-3914-4307-9e37-317749a61c02","Type":"ContainerDied","Data":"3ca40484ad5860bcb7ebbdd707785b516971aa107705c4cd13dcc0e7f00a035b"} Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.499619 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qcp8f" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.499654 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ca40484ad5860bcb7ebbdd707785b516971aa107705c4cd13dcc0e7f00a035b" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.610767 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc"] Dec 03 20:03:33 crc kubenswrapper[4916]: E1203 20:03:33.611635 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c34a0f-3914-4307-9e37-317749a61c02" containerName="ssh-known-hosts-edpm-deployment" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.611669 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c34a0f-3914-4307-9e37-317749a61c02" containerName="ssh-known-hosts-edpm-deployment" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.612116 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3c34a0f-3914-4307-9e37-317749a61c02" containerName="ssh-known-hosts-edpm-deployment" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.613483 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.622218 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc"] Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.657011 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.657035 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.657635 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.657994 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.695959 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4pcnc\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.696164 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ln7g\" (UniqueName: \"kubernetes.io/projected/ace07bb7-8494-4a26-9737-33b0407dde91-kube-api-access-7ln7g\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4pcnc\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.696447 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4pcnc\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.798727 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4pcnc\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.798840 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ln7g\" (UniqueName: \"kubernetes.io/projected/ace07bb7-8494-4a26-9737-33b0407dde91-kube-api-access-7ln7g\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4pcnc\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.799000 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4pcnc\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.802838 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4pcnc\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.803677 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4pcnc\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.827939 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ln7g\" (UniqueName: \"kubernetes.io/projected/ace07bb7-8494-4a26-9737-33b0407dde91-kube-api-access-7ln7g\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-4pcnc\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:33 crc kubenswrapper[4916]: I1203 20:03:33.978323 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:34 crc kubenswrapper[4916]: W1203 20:03:34.553831 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podace07bb7_8494_4a26_9737_33b0407dde91.slice/crio-95ce6b44016676f59c53c9073eb16ecaf28ec80247cea8215a3abce63a1dfc2c WatchSource:0}: Error finding container 95ce6b44016676f59c53c9073eb16ecaf28ec80247cea8215a3abce63a1dfc2c: Status 404 returned error can't find the container with id 95ce6b44016676f59c53c9073eb16ecaf28ec80247cea8215a3abce63a1dfc2c Dec 03 20:03:34 crc kubenswrapper[4916]: I1203 20:03:34.554521 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc"] Dec 03 20:03:35 crc kubenswrapper[4916]: I1203 20:03:35.522190 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" event={"ID":"ace07bb7-8494-4a26-9737-33b0407dde91","Type":"ContainerStarted","Data":"4387d6598842a6b8ea46e53258a6f35db1fd7fc83ff5521fdd4b42120fc8dd65"} Dec 03 20:03:35 crc kubenswrapper[4916]: I1203 20:03:35.523132 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" event={"ID":"ace07bb7-8494-4a26-9737-33b0407dde91","Type":"ContainerStarted","Data":"95ce6b44016676f59c53c9073eb16ecaf28ec80247cea8215a3abce63a1dfc2c"} Dec 03 20:03:35 crc kubenswrapper[4916]: I1203 20:03:35.541341 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" podStartSLOduration=2.0154724 podStartE2EDuration="2.541316334s" podCreationTimestamp="2025-12-03 20:03:33 +0000 UTC" firstStartedPulling="2025-12-03 20:03:34.556547606 +0000 UTC m=+2030.519357882" lastFinishedPulling="2025-12-03 20:03:35.08239152 +0000 UTC m=+2031.045201816" observedRunningTime="2025-12-03 20:03:35.535867407 +0000 UTC m=+2031.498677673" watchObservedRunningTime="2025-12-03 20:03:35.541316334 +0000 UTC m=+2031.504126630" 
Dec 03 20:03:45 crc kubenswrapper[4916]: I1203 20:03:45.614630 4916 generic.go:334] "Generic (PLEG): container finished" podID="ace07bb7-8494-4a26-9737-33b0407dde91" containerID="4387d6598842a6b8ea46e53258a6f35db1fd7fc83ff5521fdd4b42120fc8dd65" exitCode=0 Dec 03 20:03:45 crc kubenswrapper[4916]: I1203 20:03:45.615248 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" event={"ID":"ace07bb7-8494-4a26-9737-33b0407dde91","Type":"ContainerDied","Data":"4387d6598842a6b8ea46e53258a6f35db1fd7fc83ff5521fdd4b42120fc8dd65"} Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.115115 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.174553 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ln7g\" (UniqueName: \"kubernetes.io/projected/ace07bb7-8494-4a26-9737-33b0407dde91-kube-api-access-7ln7g\") pod \"ace07bb7-8494-4a26-9737-33b0407dde91\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.174859 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-inventory\") pod \"ace07bb7-8494-4a26-9737-33b0407dde91\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.175260 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-ssh-key\") pod \"ace07bb7-8494-4a26-9737-33b0407dde91\" (UID: \"ace07bb7-8494-4a26-9737-33b0407dde91\") " Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.180547 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ace07bb7-8494-4a26-9737-33b0407dde91-kube-api-access-7ln7g" (OuterVolumeSpecName: "kube-api-access-7ln7g") pod "ace07bb7-8494-4a26-9737-33b0407dde91" (UID: "ace07bb7-8494-4a26-9737-33b0407dde91"). InnerVolumeSpecName "kube-api-access-7ln7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.205013 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-inventory" (OuterVolumeSpecName: "inventory") pod "ace07bb7-8494-4a26-9737-33b0407dde91" (UID: "ace07bb7-8494-4a26-9737-33b0407dde91"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.229143 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ace07bb7-8494-4a26-9737-33b0407dde91" (UID: "ace07bb7-8494-4a26-9737-33b0407dde91"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.279141 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.279189 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ln7g\" (UniqueName: \"kubernetes.io/projected/ace07bb7-8494-4a26-9737-33b0407dde91-kube-api-access-7ln7g\") on node \"crc\" DevicePath \"\"" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.279202 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ace07bb7-8494-4a26-9737-33b0407dde91-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.633969 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" event={"ID":"ace07bb7-8494-4a26-9737-33b0407dde91","Type":"ContainerDied","Data":"95ce6b44016676f59c53c9073eb16ecaf28ec80247cea8215a3abce63a1dfc2c"} Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.634009 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95ce6b44016676f59c53c9073eb16ecaf28ec80247cea8215a3abce63a1dfc2c" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.634036 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-4pcnc" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.768769 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f"] Dec 03 20:03:47 crc kubenswrapper[4916]: E1203 20:03:47.769161 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ace07bb7-8494-4a26-9737-33b0407dde91" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.769179 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="ace07bb7-8494-4a26-9737-33b0407dde91" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.769378 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="ace07bb7-8494-4a26-9737-33b0407dde91" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.770002 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.772647 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.772762 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.772653 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.772953 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.782880 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f"] Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.887868 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-m572f\" (UID: \"c7109f13-10df-437e-96da-34c0889a9231\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.887939 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-m572f\" (UID: \"c7109f13-10df-437e-96da-34c0889a9231\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.888055 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srthd\" (UniqueName: \"kubernetes.io/projected/c7109f13-10df-437e-96da-34c0889a9231-kube-api-access-srthd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-m572f\" (UID: \"c7109f13-10df-437e-96da-34c0889a9231\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.989327 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srthd\" (UniqueName: \"kubernetes.io/projected/c7109f13-10df-437e-96da-34c0889a9231-kube-api-access-srthd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-m572f\" (UID: \"c7109f13-10df-437e-96da-34c0889a9231\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.989428 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-m572f\" (UID: \"c7109f13-10df-437e-96da-34c0889a9231\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.989497 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-m572f\" (UID: 
\"c7109f13-10df-437e-96da-34c0889a9231\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.996259 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-m572f\" (UID: \"c7109f13-10df-437e-96da-34c0889a9231\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:03:47 crc kubenswrapper[4916]: I1203 20:03:47.997992 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-m572f\" (UID: \"c7109f13-10df-437e-96da-34c0889a9231\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:03:48 crc kubenswrapper[4916]: I1203 20:03:48.008307 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srthd\" (UniqueName: \"kubernetes.io/projected/c7109f13-10df-437e-96da-34c0889a9231-kube-api-access-srthd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-m572f\" (UID: \"c7109f13-10df-437e-96da-34c0889a9231\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:03:48 crc kubenswrapper[4916]: I1203 20:03:48.086019 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:03:48 crc kubenswrapper[4916]: I1203 20:03:48.639162 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f"] Dec 03 20:03:49 crc kubenswrapper[4916]: I1203 20:03:49.655581 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" event={"ID":"c7109f13-10df-437e-96da-34c0889a9231","Type":"ContainerStarted","Data":"063b323b1fcaf1ceb863ecf007420e2bac90f6fa3856da067cce0a23d92ee5bc"} Dec 03 20:03:49 crc kubenswrapper[4916]: I1203 20:03:49.656148 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" event={"ID":"c7109f13-10df-437e-96da-34c0889a9231","Type":"ContainerStarted","Data":"45439c32852c3dcb873a229545408693be62a5aa2c5a90a2b1c3886ae4a26e74"} Dec 03 20:03:49 crc kubenswrapper[4916]: I1203 20:03:49.684587 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" podStartSLOduration=2.268402533 podStartE2EDuration="2.684552566s" podCreationTimestamp="2025-12-03 20:03:47 +0000 UTC" firstStartedPulling="2025-12-03 20:03:48.644753028 +0000 UTC m=+2044.607563294" lastFinishedPulling="2025-12-03 20:03:49.060903061 +0000 UTC m=+2045.023713327" observedRunningTime="2025-12-03 20:03:49.681949706 +0000 UTC m=+2045.644760012" watchObservedRunningTime="2025-12-03 20:03:49.684552566 +0000 UTC m=+2045.647362832" Dec 03 20:03:59 crc kubenswrapper[4916]: I1203 20:03:59.759827 4916 generic.go:334] "Generic (PLEG): container finished" podID="c7109f13-10df-437e-96da-34c0889a9231" containerID="063b323b1fcaf1ceb863ecf007420e2bac90f6fa3856da067cce0a23d92ee5bc" exitCode=0 Dec 03 20:03:59 crc kubenswrapper[4916]: I1203 20:03:59.759915 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" 
event={"ID":"c7109f13-10df-437e-96da-34c0889a9231","Type":"ContainerDied","Data":"063b323b1fcaf1ceb863ecf007420e2bac90f6fa3856da067cce0a23d92ee5bc"} Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.263040 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.369224 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srthd\" (UniqueName: \"kubernetes.io/projected/c7109f13-10df-437e-96da-34c0889a9231-kube-api-access-srthd\") pod \"c7109f13-10df-437e-96da-34c0889a9231\" (UID: \"c7109f13-10df-437e-96da-34c0889a9231\") " Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.369327 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-inventory\") pod \"c7109f13-10df-437e-96da-34c0889a9231\" (UID: \"c7109f13-10df-437e-96da-34c0889a9231\") " Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.369401 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-ssh-key\") pod \"c7109f13-10df-437e-96da-34c0889a9231\" (UID: \"c7109f13-10df-437e-96da-34c0889a9231\") " Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.379811 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7109f13-10df-437e-96da-34c0889a9231-kube-api-access-srthd" (OuterVolumeSpecName: "kube-api-access-srthd") pod "c7109f13-10df-437e-96da-34c0889a9231" (UID: "c7109f13-10df-437e-96da-34c0889a9231"). InnerVolumeSpecName "kube-api-access-srthd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.403225 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c7109f13-10df-437e-96da-34c0889a9231" (UID: "c7109f13-10df-437e-96da-34c0889a9231"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.414299 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-inventory" (OuterVolumeSpecName: "inventory") pod "c7109f13-10df-437e-96da-34c0889a9231" (UID: "c7109f13-10df-437e-96da-34c0889a9231"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.471226 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srthd\" (UniqueName: \"kubernetes.io/projected/c7109f13-10df-437e-96da-34c0889a9231-kube-api-access-srthd\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.471259 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.471268 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c7109f13-10df-437e-96da-34c0889a9231-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.777027 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" event={"ID":"c7109f13-10df-437e-96da-34c0889a9231","Type":"ContainerDied","Data":"45439c32852c3dcb873a229545408693be62a5aa2c5a90a2b1c3886ae4a26e74"} Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.777392 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45439c32852c3dcb873a229545408693be62a5aa2c5a90a2b1c3886ae4a26e74" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.777290 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-m572f" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.914956 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"] Dec 03 20:04:01 crc kubenswrapper[4916]: E1203 20:04:01.915402 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7109f13-10df-437e-96da-34c0889a9231" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.915425 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7109f13-10df-437e-96da-34c0889a9231" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.915684 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7109f13-10df-437e-96da-34c0889a9231" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.916350 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.922042 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.922173 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.924865 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.925100 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.925279 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.926953 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.927190 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.932502 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.958328 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"] Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.983703 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.983779 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt2cl\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-kube-api-access-wt2cl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.983812 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.983842 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.983873 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.983907 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.983966 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.984020 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.984065 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.984366 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.984452 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-nova-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.984491 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.984553 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:01 crc kubenswrapper[4916]: I1203 20:04:01.984615 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.090873 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.090950 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.090979 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt2cl\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-kube-api-access-wt2cl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.091000 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.091021 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.091037 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.091062 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.091088 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.091125 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.091157 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.091194 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.091217 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.091236 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.091269 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.095100 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.095313 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.095662 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.095883 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.096070 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.096600 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.097155 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.097237 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.098228 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.098526 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.098954 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.099826 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.108264 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt2cl\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-kube-api-access-wt2cl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.110097 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.233985 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.768291 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"]
Dec 03 20:04:02 crc kubenswrapper[4916]: I1203 20:04:02.789208 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" event={"ID":"c94bfa68-5e27-47fe-a55e-b05abead70ac","Type":"ContainerStarted","Data":"6056b9a2813e60dc5416fb4553ad4566761f6570b9acd75eacae8eba64ffa5af"}
Dec 03 20:04:03 crc kubenswrapper[4916]: I1203 20:04:03.802920 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" event={"ID":"c94bfa68-5e27-47fe-a55e-b05abead70ac","Type":"ContainerStarted","Data":"d0774d87490bfcb6e859e8c03f29ebd8ab4f8e47ded5589eac30373bf4262127"}
Dec 03 20:04:03 crc kubenswrapper[4916]: I1203 20:04:03.835632 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" podStartSLOduration=2.299401137 podStartE2EDuration="2.835605799s" podCreationTimestamp="2025-12-03 20:04:01 +0000 UTC" firstStartedPulling="2025-12-03 20:04:02.770219943 +0000 UTC m=+2058.733030209" lastFinishedPulling="2025-12-03 20:04:03.306424565 +0000 UTC m=+2059.269234871" observedRunningTime="2025-12-03 20:04:03.834934591 +0000 UTC m=+2059.797744867" watchObservedRunningTime="2025-12-03 20:04:03.835605799 +0000 UTC m=+2059.798416085"
Dec 03 20:04:53 crc kubenswrapper[4916]: I1203 20:04:53.331882 4916 generic.go:334] "Generic (PLEG): container finished" podID="c94bfa68-5e27-47fe-a55e-b05abead70ac" containerID="d0774d87490bfcb6e859e8c03f29ebd8ab4f8e47ded5589eac30373bf4262127" exitCode=0
Dec 03 20:04:53 crc kubenswrapper[4916]: I1203 20:04:53.332030 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" event={"ID":"c94bfa68-5e27-47fe-a55e-b05abead70ac","Type":"ContainerDied","Data":"d0774d87490bfcb6e859e8c03f29ebd8ab4f8e47ded5589eac30373bf4262127"}
Dec 03 20:04:54 crc kubenswrapper[4916]: I1203 20:04:54.850845 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp"
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.997586 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ssh-key\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.997678 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-nova-combined-ca-bundle\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.997748 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-inventory\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.997791 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-bootstrap-combined-ca-bundle\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.997836 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-repo-setup-combined-ca-bundle\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.997912 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-neutron-metadata-combined-ca-bundle\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.997961 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.998042 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-libvirt-combined-ca-bundle\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.998155 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: 
I1203 20:04:54.998226 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-ovn-default-certs-0\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.998280 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wt2cl\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-kube-api-access-wt2cl\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.998357 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ovn-combined-ca-bundle\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.998406 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-telemetry-combined-ca-bundle\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:54.998454 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"c94bfa68-5e27-47fe-a55e-b05abead70ac\" (UID: \"c94bfa68-5e27-47fe-a55e-b05abead70ac\") " Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.006786 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.007065 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.007603 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.007679 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.007684 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.008201 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.009010 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.009412 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.009558 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.009963 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.011011 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.011758 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-kube-api-access-wt2cl" (OuterVolumeSpecName: "kube-api-access-wt2cl") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "kube-api-access-wt2cl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.052883 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-inventory" (OuterVolumeSpecName: "inventory") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.058941 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c94bfa68-5e27-47fe-a55e-b05abead70ac" (UID: "c94bfa68-5e27-47fe-a55e-b05abead70ac"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119590 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119621 4916 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119633 4916 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119642 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119652 4916 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119661 4916 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: 
I1203 20:04:55.119671 4916 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119682 4916 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119691 4916 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119701 4916 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119709 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wt2cl\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-kube-api-access-wt2cl\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119718 4916 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119726 4916 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c94bfa68-5e27-47fe-a55e-b05abead70ac-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.119734 4916 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c94bfa68-5e27-47fe-a55e-b05abead70ac-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.359963 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" event={"ID":"c94bfa68-5e27-47fe-a55e-b05abead70ac","Type":"ContainerDied","Data":"6056b9a2813e60dc5416fb4553ad4566761f6570b9acd75eacae8eba64ffa5af"} Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.360025 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6056b9a2813e60dc5416fb4553ad4566761f6570b9acd75eacae8eba64ffa5af" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.360111 4916 util.go:48] "No ready sandbox for pod can be found. 
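Teardown mirrors setup: UnmountVolume started (reconciler_common.go:159), then UnmountVolume.TearDown succeeded (operation_generator.go:803), then Volume detached (reconciler_common.go:293). A toy Go invariant check, not kubelet code, that walks those phases in log order and flags any volume that began unmounting but was never reported detached:

package main

import "fmt"

// phase strings as they appear in the records above
const (
	phaseStarted  = "UnmountVolume started"
	phaseTearDown = "UnmountVolume.TearDown succeeded"
	phaseDetached = "Volume detached"
)

// checkTeardown consumes (phase, volumeName) pairs in log order and reports
// volumes whose last observed phase is not "Volume detached".
func checkTeardown(events [][2]string) {
	last := map[string]string{}
	for _, e := range events {
		last[e[1]] = e[0]
	}
	for vol, phase := range last {
		if phase != phaseDetached {
			fmt.Printf("volume %q stuck after %q\n", vol, phase)
		}
	}
}

func main() {
	checkTeardown([][2]string{
		{phaseStarted, "ssh-key"},
		{phaseTearDown, "ssh-key"},
		{phaseDetached, "ssh-key"},
		{phaseStarted, "inventory"}, // no detach record seen yet
	})
	// prints: volume "inventory" stuck after "UnmountVolume started"
}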
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.465736 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr"] Dec 03 20:04:55 crc kubenswrapper[4916]: E1203 20:04:55.466381 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c94bfa68-5e27-47fe-a55e-b05abead70ac" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.466416 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="c94bfa68-5e27-47fe-a55e-b05abead70ac" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.466753 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="c94bfa68-5e27-47fe-a55e-b05abead70ac" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.467756 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.471901 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.471947 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.471913 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.472134 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.474743 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.479075 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr"] Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.529835 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.529920 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d7bp\" (UniqueName: \"kubernetes.io/projected/49da2e3d-9d45-478a-b073-beb7a5ca51ae-kube-api-access-9d7bp\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.530058 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.530079 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.530121 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.634178 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.634504 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d7bp\" (UniqueName: \"kubernetes.io/projected/49da2e3d-9d45-478a-b073-beb7a5ca51ae-kube-api-access-9d7bp\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.634676 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.634706 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.634796 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.635827 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.639406 4916 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.644382 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.648160 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.661358 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d7bp\" (UniqueName: \"kubernetes.io/projected/49da2e3d-9d45-478a-b073-beb7a5ca51ae-kube-api-access-9d7bp\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qv9hr\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:55 crc kubenswrapper[4916]: I1203 20:04:55.806820 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.368700 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr"] Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.782013 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cbnjv"] Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.784646 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.799283 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cbnjv"] Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.863846 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvbdp\" (UniqueName: \"kubernetes.io/projected/6c823b6c-b1d9-462a-82a6-79c050eed6a5-kube-api-access-gvbdp\") pod \"redhat-marketplace-cbnjv\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.864011 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-catalog-content\") pod \"redhat-marketplace-cbnjv\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.864107 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-utilities\") pod \"redhat-marketplace-cbnjv\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.966412 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-utilities\") pod \"redhat-marketplace-cbnjv\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.966753 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvbdp\" (UniqueName: \"kubernetes.io/projected/6c823b6c-b1d9-462a-82a6-79c050eed6a5-kube-api-access-gvbdp\") pod \"redhat-marketplace-cbnjv\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.966821 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-catalog-content\") pod \"redhat-marketplace-cbnjv\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.967075 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-utilities\") pod \"redhat-marketplace-cbnjv\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.967187 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-catalog-content\") pod \"redhat-marketplace-cbnjv\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:04:56 crc kubenswrapper[4916]: I1203 20:04:56.984398 4916 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-gvbdp\" (UniqueName: \"kubernetes.io/projected/6c823b6c-b1d9-462a-82a6-79c050eed6a5-kube-api-access-gvbdp\") pod \"redhat-marketplace-cbnjv\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:04:57 crc kubenswrapper[4916]: I1203 20:04:57.114756 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:04:57 crc kubenswrapper[4916]: I1203 20:04:57.377455 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" event={"ID":"49da2e3d-9d45-478a-b073-beb7a5ca51ae","Type":"ContainerStarted","Data":"69c42c0e0c189aa61f5c50732c049c7d0a50dd98fcb9bc90fa0ecfe6bbd3017a"} Dec 03 20:04:57 crc kubenswrapper[4916]: I1203 20:04:57.377835 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" event={"ID":"49da2e3d-9d45-478a-b073-beb7a5ca51ae","Type":"ContainerStarted","Data":"ddbc2c594f35a52e69f112537802f3f1a1e05d7e82010a5af809d339221ad20b"} Dec 03 20:04:57 crc kubenswrapper[4916]: I1203 20:04:57.401308 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" podStartSLOduration=1.897457019 podStartE2EDuration="2.401284371s" podCreationTimestamp="2025-12-03 20:04:55 +0000 UTC" firstStartedPulling="2025-12-03 20:04:56.378827029 +0000 UTC m=+2112.341637295" lastFinishedPulling="2025-12-03 20:04:56.882654381 +0000 UTC m=+2112.845464647" observedRunningTime="2025-12-03 20:04:57.392613927 +0000 UTC m=+2113.355424203" watchObservedRunningTime="2025-12-03 20:04:57.401284371 +0000 UTC m=+2113.364094637" Dec 03 20:04:57 crc kubenswrapper[4916]: I1203 20:04:57.618139 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cbnjv"] Dec 03 20:04:57 crc kubenswrapper[4916]: W1203 20:04:57.622087 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c823b6c_b1d9_462a_82a6_79c050eed6a5.slice/crio-646f7fd2fbaf71901946b644a46c69bba9defdc04a355f1c1aec6be21b121c19 WatchSource:0}: Error finding container 646f7fd2fbaf71901946b644a46c69bba9defdc04a355f1c1aec6be21b121c19: Status 404 returned error can't find the container with id 646f7fd2fbaf71901946b644a46c69bba9defdc04a355f1c1aec6be21b121c19 Dec 03 20:04:58 crc kubenswrapper[4916]: I1203 20:04:58.389290 4916 generic.go:334] "Generic (PLEG): container finished" podID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" containerID="062002d9ae9dddbb812dd2e03d17028c0bae63fc083345ffbb4228dd0133ab9d" exitCode=0 Dec 03 20:04:58 crc kubenswrapper[4916]: I1203 20:04:58.390300 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbnjv" event={"ID":"6c823b6c-b1d9-462a-82a6-79c050eed6a5","Type":"ContainerDied","Data":"062002d9ae9dddbb812dd2e03d17028c0bae63fc083345ffbb4228dd0133ab9d"} Dec 03 20:04:58 crc kubenswrapper[4916]: I1203 20:04:58.390334 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbnjv" event={"ID":"6c823b6c-b1d9-462a-82a6-79c050eed6a5","Type":"ContainerStarted","Data":"646f7fd2fbaf71901946b644a46c69bba9defdc04a355f1c1aec6be21b121c19"} Dec 03 20:04:58 crc kubenswrapper[4916]: I1203 20:04:58.982920 4916 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-5xvh7"] Dec 03 20:04:58 crc kubenswrapper[4916]: I1203 20:04:58.986716 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.007849 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-catalog-content\") pod \"certified-operators-5xvh7\" (UID: \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.007919 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-utilities\") pod \"certified-operators-5xvh7\" (UID: \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.007964 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzlpn\" (UniqueName: \"kubernetes.io/projected/abb4e81c-4dee-4961-9435-d4a0e0d4a206-kube-api-access-qzlpn\") pod \"certified-operators-5xvh7\" (UID: \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.013371 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5xvh7"] Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.109812 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzlpn\" (UniqueName: \"kubernetes.io/projected/abb4e81c-4dee-4961-9435-d4a0e0d4a206-kube-api-access-qzlpn\") pod \"certified-operators-5xvh7\" (UID: \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.109916 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-catalog-content\") pod \"certified-operators-5xvh7\" (UID: \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.109966 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-utilities\") pod \"certified-operators-5xvh7\" (UID: \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.110354 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-utilities\") pod \"certified-operators-5xvh7\" (UID: \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.110554 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-catalog-content\") pod \"certified-operators-5xvh7\" (UID: 
\"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.137729 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzlpn\" (UniqueName: \"kubernetes.io/projected/abb4e81c-4dee-4961-9435-d4a0e0d4a206-kube-api-access-qzlpn\") pod \"certified-operators-5xvh7\" (UID: \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.384856 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.401189 4916 generic.go:334] "Generic (PLEG): container finished" podID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" containerID="237592b6452acf6ee61b5addb6a30d89f10e0d53f03bf2c4ccfb5d0dd14d119a" exitCode=0 Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.401236 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbnjv" event={"ID":"6c823b6c-b1d9-462a-82a6-79c050eed6a5","Type":"ContainerDied","Data":"237592b6452acf6ee61b5addb6a30d89f10e0d53f03bf2c4ccfb5d0dd14d119a"} Dec 03 20:04:59 crc kubenswrapper[4916]: I1203 20:04:59.950800 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5xvh7"] Dec 03 20:04:59 crc kubenswrapper[4916]: W1203 20:04:59.953160 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podabb4e81c_4dee_4961_9435_d4a0e0d4a206.slice/crio-c4a1c8d5128579b95575fa7b8d04f1a2e28efe870d85861f7fb6edb667901851 WatchSource:0}: Error finding container c4a1c8d5128579b95575fa7b8d04f1a2e28efe870d85861f7fb6edb667901851: Status 404 returned error can't find the container with id c4a1c8d5128579b95575fa7b8d04f1a2e28efe870d85861f7fb6edb667901851 Dec 03 20:05:00 crc kubenswrapper[4916]: I1203 20:05:00.416040 4916 generic.go:334] "Generic (PLEG): container finished" podID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" containerID="b17dc966ef7714a738caffc5256b4ac0e333d1d14084bbe89295ff173add3db9" exitCode=0 Dec 03 20:05:00 crc kubenswrapper[4916]: I1203 20:05:00.416087 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xvh7" event={"ID":"abb4e81c-4dee-4961-9435-d4a0e0d4a206","Type":"ContainerDied","Data":"b17dc966ef7714a738caffc5256b4ac0e333d1d14084bbe89295ff173add3db9"} Dec 03 20:05:00 crc kubenswrapper[4916]: I1203 20:05:00.416342 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xvh7" event={"ID":"abb4e81c-4dee-4961-9435-d4a0e0d4a206","Type":"ContainerStarted","Data":"c4a1c8d5128579b95575fa7b8d04f1a2e28efe870d85861f7fb6edb667901851"} Dec 03 20:05:01 crc kubenswrapper[4916]: I1203 20:05:01.427382 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbnjv" event={"ID":"6c823b6c-b1d9-462a-82a6-79c050eed6a5","Type":"ContainerStarted","Data":"b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa"} Dec 03 20:05:01 crc kubenswrapper[4916]: I1203 20:05:01.432923 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xvh7" event={"ID":"abb4e81c-4dee-4961-9435-d4a0e0d4a206","Type":"ContainerStarted","Data":"43eb33ecbb0b51bfb9bae6de3186eb7324ce82b976e87e9ab27fdcc37bcc913f"} Dec 03 20:05:01 crc 
kubenswrapper[4916]: I1203 20:05:01.465841 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cbnjv" podStartSLOduration=3.6177953880000002 podStartE2EDuration="5.465824195s" podCreationTimestamp="2025-12-03 20:04:56 +0000 UTC" firstStartedPulling="2025-12-03 20:04:58.391823353 +0000 UTC m=+2114.354633629" lastFinishedPulling="2025-12-03 20:05:00.23985216 +0000 UTC m=+2116.202662436" observedRunningTime="2025-12-03 20:05:01.45744867 +0000 UTC m=+2117.420258936" watchObservedRunningTime="2025-12-03 20:05:01.465824195 +0000 UTC m=+2117.428634461" Dec 03 20:05:02 crc kubenswrapper[4916]: I1203 20:05:02.443015 4916 generic.go:334] "Generic (PLEG): container finished" podID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" containerID="43eb33ecbb0b51bfb9bae6de3186eb7324ce82b976e87e9ab27fdcc37bcc913f" exitCode=0 Dec 03 20:05:02 crc kubenswrapper[4916]: I1203 20:05:02.443083 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xvh7" event={"ID":"abb4e81c-4dee-4961-9435-d4a0e0d4a206","Type":"ContainerDied","Data":"43eb33ecbb0b51bfb9bae6de3186eb7324ce82b976e87e9ab27fdcc37bcc913f"} Dec 03 20:05:02 crc kubenswrapper[4916]: I1203 20:05:02.443426 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xvh7" event={"ID":"abb4e81c-4dee-4961-9435-d4a0e0d4a206","Type":"ContainerStarted","Data":"45878fb849332755b37faf3cdf51b19a296217271d80f29d308a2544ec20ad45"} Dec 03 20:05:02 crc kubenswrapper[4916]: I1203 20:05:02.467901 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5xvh7" podStartSLOduration=2.925097102 podStartE2EDuration="4.467879877s" podCreationTimestamp="2025-12-03 20:04:58 +0000 UTC" firstStartedPulling="2025-12-03 20:05:00.420599972 +0000 UTC m=+2116.383410238" lastFinishedPulling="2025-12-03 20:05:01.963382737 +0000 UTC m=+2117.926193013" observedRunningTime="2025-12-03 20:05:02.459904733 +0000 UTC m=+2118.422715009" watchObservedRunningTime="2025-12-03 20:05:02.467879877 +0000 UTC m=+2118.430690143" Dec 03 20:05:07 crc kubenswrapper[4916]: I1203 20:05:07.115684 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:05:07 crc kubenswrapper[4916]: I1203 20:05:07.117646 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:05:07 crc kubenswrapper[4916]: I1203 20:05:07.190303 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:05:07 crc kubenswrapper[4916]: I1203 20:05:07.584882 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:05:07 crc kubenswrapper[4916]: I1203 20:05:07.662010 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cbnjv"] Dec 03 20:05:09 crc kubenswrapper[4916]: I1203 20:05:09.385782 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:05:09 crc kubenswrapper[4916]: I1203 20:05:09.386111 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:05:09 crc kubenswrapper[4916]: I1203 20:05:09.458538 4916 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:05:09 crc kubenswrapper[4916]: I1203 20:05:09.523881 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cbnjv" podUID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" containerName="registry-server" containerID="cri-o://b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa" gracePeriod=2 Dec 03 20:05:09 crc kubenswrapper[4916]: I1203 20:05:09.595142 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.045744 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.135293 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-utilities\") pod \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.135389 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvbdp\" (UniqueName: \"kubernetes.io/projected/6c823b6c-b1d9-462a-82a6-79c050eed6a5-kube-api-access-gvbdp\") pod \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.135468 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-catalog-content\") pod \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\" (UID: \"6c823b6c-b1d9-462a-82a6-79c050eed6a5\") " Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.137046 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-utilities" (OuterVolumeSpecName: "utilities") pod "6c823b6c-b1d9-462a-82a6-79c050eed6a5" (UID: "6c823b6c-b1d9-462a-82a6-79c050eed6a5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.143703 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c823b6c-b1d9-462a-82a6-79c050eed6a5-kube-api-access-gvbdp" (OuterVolumeSpecName: "kube-api-access-gvbdp") pod "6c823b6c-b1d9-462a-82a6-79c050eed6a5" (UID: "6c823b6c-b1d9-462a-82a6-79c050eed6a5"). InnerVolumeSpecName "kube-api-access-gvbdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.178805 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c823b6c-b1d9-462a-82a6-79c050eed6a5" (UID: "6c823b6c-b1d9-462a-82a6-79c050eed6a5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.237498 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.237545 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvbdp\" (UniqueName: \"kubernetes.io/projected/6c823b6c-b1d9-462a-82a6-79c050eed6a5-kube-api-access-gvbdp\") on node \"crc\" DevicePath \"\"" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.237583 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c823b6c-b1d9-462a-82a6-79c050eed6a5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.536949 4916 generic.go:334] "Generic (PLEG): container finished" podID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" containerID="b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa" exitCode=0 Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.537012 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbnjv" event={"ID":"6c823b6c-b1d9-462a-82a6-79c050eed6a5","Type":"ContainerDied","Data":"b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa"} Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.537092 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cbnjv" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.537189 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cbnjv" event={"ID":"6c823b6c-b1d9-462a-82a6-79c050eed6a5","Type":"ContainerDied","Data":"646f7fd2fbaf71901946b644a46c69bba9defdc04a355f1c1aec6be21b121c19"} Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.537221 4916 scope.go:117] "RemoveContainer" containerID="b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.571418 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cbnjv"] Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.575695 4916 scope.go:117] "RemoveContainer" containerID="237592b6452acf6ee61b5addb6a30d89f10e0d53f03bf2c4ccfb5d0dd14d119a" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.584055 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cbnjv"] Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.596308 4916 scope.go:117] "RemoveContainer" containerID="062002d9ae9dddbb812dd2e03d17028c0bae63fc083345ffbb4228dd0133ab9d" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.637300 4916 scope.go:117] "RemoveContainer" containerID="b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa" Dec 03 20:05:10 crc kubenswrapper[4916]: E1203 20:05:10.637782 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa\": container with ID starting with b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa not found: ID does not exist" containerID="b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.637846 4916 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa"} err="failed to get container status \"b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa\": rpc error: code = NotFound desc = could not find container \"b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa\": container with ID starting with b9a0ecf001b7fad653eecf1b11a306e946c873c32c8150489ebc2cd7d31a9efa not found: ID does not exist" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.637875 4916 scope.go:117] "RemoveContainer" containerID="237592b6452acf6ee61b5addb6a30d89f10e0d53f03bf2c4ccfb5d0dd14d119a" Dec 03 20:05:10 crc kubenswrapper[4916]: E1203 20:05:10.638156 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"237592b6452acf6ee61b5addb6a30d89f10e0d53f03bf2c4ccfb5d0dd14d119a\": container with ID starting with 237592b6452acf6ee61b5addb6a30d89f10e0d53f03bf2c4ccfb5d0dd14d119a not found: ID does not exist" containerID="237592b6452acf6ee61b5addb6a30d89f10e0d53f03bf2c4ccfb5d0dd14d119a" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.638187 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"237592b6452acf6ee61b5addb6a30d89f10e0d53f03bf2c4ccfb5d0dd14d119a"} err="failed to get container status \"237592b6452acf6ee61b5addb6a30d89f10e0d53f03bf2c4ccfb5d0dd14d119a\": rpc error: code = NotFound desc = could not find container \"237592b6452acf6ee61b5addb6a30d89f10e0d53f03bf2c4ccfb5d0dd14d119a\": container with ID starting with 237592b6452acf6ee61b5addb6a30d89f10e0d53f03bf2c4ccfb5d0dd14d119a not found: ID does not exist" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.638208 4916 scope.go:117] "RemoveContainer" containerID="062002d9ae9dddbb812dd2e03d17028c0bae63fc083345ffbb4228dd0133ab9d" Dec 03 20:05:10 crc kubenswrapper[4916]: E1203 20:05:10.638444 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"062002d9ae9dddbb812dd2e03d17028c0bae63fc083345ffbb4228dd0133ab9d\": container with ID starting with 062002d9ae9dddbb812dd2e03d17028c0bae63fc083345ffbb4228dd0133ab9d not found: ID does not exist" containerID="062002d9ae9dddbb812dd2e03d17028c0bae63fc083345ffbb4228dd0133ab9d" Dec 03 20:05:10 crc kubenswrapper[4916]: I1203 20:05:10.638470 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"062002d9ae9dddbb812dd2e03d17028c0bae63fc083345ffbb4228dd0133ab9d"} err="failed to get container status \"062002d9ae9dddbb812dd2e03d17028c0bae63fc083345ffbb4228dd0133ab9d\": rpc error: code = NotFound desc = could not find container \"062002d9ae9dddbb812dd2e03d17028c0bae63fc083345ffbb4228dd0133ab9d\": container with ID starting with 062002d9ae9dddbb812dd2e03d17028c0bae63fc083345ffbb4228dd0133ab9d not found: ID does not exist" Dec 03 20:05:11 crc kubenswrapper[4916]: I1203 20:05:11.761458 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5xvh7"] Dec 03 20:05:11 crc kubenswrapper[4916]: I1203 20:05:11.762021 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5xvh7" podUID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" containerName="registry-server" containerID="cri-o://45878fb849332755b37faf3cdf51b19a296217271d80f29d308a2544ec20ad45" gracePeriod=2 Dec 03 
20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.497310 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" path="/var/lib/kubelet/pods/6c823b6c-b1d9-462a-82a6-79c050eed6a5/volumes" Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.564400 4916 generic.go:334] "Generic (PLEG): container finished" podID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" containerID="45878fb849332755b37faf3cdf51b19a296217271d80f29d308a2544ec20ad45" exitCode=0 Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.564467 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xvh7" event={"ID":"abb4e81c-4dee-4961-9435-d4a0e0d4a206","Type":"ContainerDied","Data":"45878fb849332755b37faf3cdf51b19a296217271d80f29d308a2544ec20ad45"} Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.778685 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.886402 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzlpn\" (UniqueName: \"kubernetes.io/projected/abb4e81c-4dee-4961-9435-d4a0e0d4a206-kube-api-access-qzlpn\") pod \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\" (UID: \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.886490 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-catalog-content\") pod \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\" (UID: \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.886542 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-utilities\") pod \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\" (UID: \"abb4e81c-4dee-4961-9435-d4a0e0d4a206\") " Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.889849 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-utilities" (OuterVolumeSpecName: "utilities") pod "abb4e81c-4dee-4961-9435-d4a0e0d4a206" (UID: "abb4e81c-4dee-4961-9435-d4a0e0d4a206"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.903464 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abb4e81c-4dee-4961-9435-d4a0e0d4a206-kube-api-access-qzlpn" (OuterVolumeSpecName: "kube-api-access-qzlpn") pod "abb4e81c-4dee-4961-9435-d4a0e0d4a206" (UID: "abb4e81c-4dee-4961-9435-d4a0e0d4a206"). InnerVolumeSpecName "kube-api-access-qzlpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.946403 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "abb4e81c-4dee-4961-9435-d4a0e0d4a206" (UID: "abb4e81c-4dee-4961-9435-d4a0e0d4a206"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.995218 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.995245 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abb4e81c-4dee-4961-9435-d4a0e0d4a206-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:05:12 crc kubenswrapper[4916]: I1203 20:05:12.995255 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzlpn\" (UniqueName: \"kubernetes.io/projected/abb4e81c-4dee-4961-9435-d4a0e0d4a206-kube-api-access-qzlpn\") on node \"crc\" DevicePath \"\"" Dec 03 20:05:13 crc kubenswrapper[4916]: I1203 20:05:13.578672 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5xvh7" event={"ID":"abb4e81c-4dee-4961-9435-d4a0e0d4a206","Type":"ContainerDied","Data":"c4a1c8d5128579b95575fa7b8d04f1a2e28efe870d85861f7fb6edb667901851"} Dec 03 20:05:13 crc kubenswrapper[4916]: I1203 20:05:13.578731 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5xvh7" Dec 03 20:05:13 crc kubenswrapper[4916]: I1203 20:05:13.579091 4916 scope.go:117] "RemoveContainer" containerID="45878fb849332755b37faf3cdf51b19a296217271d80f29d308a2544ec20ad45" Dec 03 20:05:13 crc kubenswrapper[4916]: I1203 20:05:13.636641 4916 scope.go:117] "RemoveContainer" containerID="43eb33ecbb0b51bfb9bae6de3186eb7324ce82b976e87e9ab27fdcc37bcc913f" Dec 03 20:05:13 crc kubenswrapper[4916]: I1203 20:05:13.642078 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5xvh7"] Dec 03 20:05:13 crc kubenswrapper[4916]: I1203 20:05:13.655072 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5xvh7"] Dec 03 20:05:13 crc kubenswrapper[4916]: I1203 20:05:13.677801 4916 scope.go:117] "RemoveContainer" containerID="b17dc966ef7714a738caffc5256b4ac0e333d1d14084bbe89295ff173add3db9" Dec 03 20:05:14 crc kubenswrapper[4916]: I1203 20:05:14.497648 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" path="/var/lib/kubelet/pods/abb4e81c-4dee-4961-9435-d4a0e0d4a206/volumes" Dec 03 20:05:16 crc kubenswrapper[4916]: I1203 20:05:16.158969 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:05:16 crc kubenswrapper[4916]: I1203 20:05:16.159270 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:05:46 crc kubenswrapper[4916]: I1203 20:05:46.159299 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:05:46 crc kubenswrapper[4916]: I1203 20:05:46.160091 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:06:13 crc kubenswrapper[4916]: I1203 20:06:13.263365 4916 generic.go:334] "Generic (PLEG): container finished" podID="49da2e3d-9d45-478a-b073-beb7a5ca51ae" containerID="69c42c0e0c189aa61f5c50732c049c7d0a50dd98fcb9bc90fa0ecfe6bbd3017a" exitCode=0 Dec 03 20:06:13 crc kubenswrapper[4916]: I1203 20:06:13.264074 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" event={"ID":"49da2e3d-9d45-478a-b073-beb7a5ca51ae","Type":"ContainerDied","Data":"69c42c0e0c189aa61f5c50732c049c7d0a50dd98fcb9bc90fa0ecfe6bbd3017a"} Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.669313 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.825601 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovn-combined-ca-bundle\") pod \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.825701 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-inventory\") pod \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.825778 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9d7bp\" (UniqueName: \"kubernetes.io/projected/49da2e3d-9d45-478a-b073-beb7a5ca51ae-kube-api-access-9d7bp\") pod \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.825949 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovncontroller-config-0\") pod \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.826115 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ssh-key\") pod \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\" (UID: \"49da2e3d-9d45-478a-b073-beb7a5ca51ae\") " Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.832302 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "49da2e3d-9d45-478a-b073-beb7a5ca51ae" (UID: "49da2e3d-9d45-478a-b073-beb7a5ca51ae"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.833775 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49da2e3d-9d45-478a-b073-beb7a5ca51ae-kube-api-access-9d7bp" (OuterVolumeSpecName: "kube-api-access-9d7bp") pod "49da2e3d-9d45-478a-b073-beb7a5ca51ae" (UID: "49da2e3d-9d45-478a-b073-beb7a5ca51ae"). InnerVolumeSpecName "kube-api-access-9d7bp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.873018 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "49da2e3d-9d45-478a-b073-beb7a5ca51ae" (UID: "49da2e3d-9d45-478a-b073-beb7a5ca51ae"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.875322 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "49da2e3d-9d45-478a-b073-beb7a5ca51ae" (UID: "49da2e3d-9d45-478a-b073-beb7a5ca51ae"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.884530 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-inventory" (OuterVolumeSpecName: "inventory") pod "49da2e3d-9d45-478a-b073-beb7a5ca51ae" (UID: "49da2e3d-9d45-478a-b073-beb7a5ca51ae"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.929158 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.929215 4916 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.929228 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/49da2e3d-9d45-478a-b073-beb7a5ca51ae-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.929243 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9d7bp\" (UniqueName: \"kubernetes.io/projected/49da2e3d-9d45-478a-b073-beb7a5ca51ae-kube-api-access-9d7bp\") on node \"crc\" DevicePath \"\"" Dec 03 20:06:14 crc kubenswrapper[4916]: I1203 20:06:14.929278 4916 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/49da2e3d-9d45-478a-b073-beb7a5ca51ae-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.295736 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" event={"ID":"49da2e3d-9d45-478a-b073-beb7a5ca51ae","Type":"ContainerDied","Data":"ddbc2c594f35a52e69f112537802f3f1a1e05d7e82010a5af809d339221ad20b"} Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 
20:06:15.296108 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ddbc2c594f35a52e69f112537802f3f1a1e05d7e82010a5af809d339221ad20b" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.295768 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qv9hr" Dec 03 20:06:15 crc kubenswrapper[4916]: E1203 20:06:15.371961 4916 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49da2e3d_9d45_478a_b073_beb7a5ca51ae.slice\": RecentStats: unable to find data in memory cache]" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.446547 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4"] Dec 03 20:06:15 crc kubenswrapper[4916]: E1203 20:06:15.450346 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" containerName="extract-content" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.450400 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" containerName="extract-content" Dec 03 20:06:15 crc kubenswrapper[4916]: E1203 20:06:15.450420 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" containerName="registry-server" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.450429 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" containerName="registry-server" Dec 03 20:06:15 crc kubenswrapper[4916]: E1203 20:06:15.450449 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" containerName="registry-server" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.450459 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" containerName="registry-server" Dec 03 20:06:15 crc kubenswrapper[4916]: E1203 20:06:15.450479 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" containerName="extract-utilities" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.450488 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" containerName="extract-utilities" Dec 03 20:06:15 crc kubenswrapper[4916]: E1203 20:06:15.450507 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49da2e3d-9d45-478a-b073-beb7a5ca51ae" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.450515 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="49da2e3d-9d45-478a-b073-beb7a5ca51ae" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 20:06:15 crc kubenswrapper[4916]: E1203 20:06:15.450539 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" containerName="extract-content" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.450547 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" containerName="extract-content" Dec 03 20:06:15 crc kubenswrapper[4916]: E1203 20:06:15.450586 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" containerName="extract-utilities" Dec 
03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.450594 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" containerName="extract-utilities" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.450825 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c823b6c-b1d9-462a-82a6-79c050eed6a5" containerName="registry-server" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.450859 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="abb4e81c-4dee-4961-9435-d4a0e0d4a206" containerName="registry-server" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.450873 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="49da2e3d-9d45-478a-b073-beb7a5ca51ae" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.451690 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.454328 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.455107 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.456030 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.456208 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.456339 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.456552 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.476814 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4"] Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.544915 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx9kh\" (UniqueName: \"kubernetes.io/projected/257d92ae-6326-4650-830c-b29ed36146e7-kube-api-access-mx9kh\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.545043 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.545075 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.545115 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.545159 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.545188 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.647352 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.647421 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.647497 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.647560 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" 
Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.647629 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.648000 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx9kh\" (UniqueName: \"kubernetes.io/projected/257d92ae-6326-4650-830c-b29ed36146e7-kube-api-access-mx9kh\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.651999 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.652627 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.653370 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.661765 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.666372 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.678706 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx9kh\" (UniqueName: \"kubernetes.io/projected/257d92ae-6326-4650-830c-b29ed36146e7-kube-api-access-mx9kh\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:15 crc kubenswrapper[4916]: I1203 20:06:15.812971 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:06:16 crc kubenswrapper[4916]: I1203 20:06:16.158585 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:06:16 crc kubenswrapper[4916]: I1203 20:06:16.158846 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:06:16 crc kubenswrapper[4916]: I1203 20:06:16.158888 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 20:06:16 crc kubenswrapper[4916]: I1203 20:06:16.159305 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 20:06:16 crc kubenswrapper[4916]: I1203 20:06:16.159354 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" gracePeriod=600 Dec 03 20:06:16 crc kubenswrapper[4916]: I1203 20:06:16.192772 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4"] Dec 03 20:06:16 crc kubenswrapper[4916]: E1203 20:06:16.284839 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:06:16 crc kubenswrapper[4916]: I1203 20:06:16.304947 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" exitCode=0 Dec 03 20:06:16 crc kubenswrapper[4916]: I1203 20:06:16.305001 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab"} Dec 03 20:06:16 crc kubenswrapper[4916]: I1203 20:06:16.305030 4916 scope.go:117] 
"RemoveContainer" containerID="e13c1fbfb649ac0f9340740b2db42f1899368837b532c074802cbbdf37483fd4" Dec 03 20:06:16 crc kubenswrapper[4916]: I1203 20:06:16.305643 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:06:16 crc kubenswrapper[4916]: E1203 20:06:16.305946 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:06:16 crc kubenswrapper[4916]: I1203 20:06:16.307759 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" event={"ID":"257d92ae-6326-4650-830c-b29ed36146e7","Type":"ContainerStarted","Data":"9df92468d9f87bbc80d79ff80482c8b4b313e2b54ca539fed1dc4178c653b4cb"} Dec 03 20:06:20 crc kubenswrapper[4916]: I1203 20:06:20.356711 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" event={"ID":"257d92ae-6326-4650-830c-b29ed36146e7","Type":"ContainerStarted","Data":"58e0a966e8c6753502f9c31613c44157c96f547ffa7efc888463235320110e35"} Dec 03 20:06:20 crc kubenswrapper[4916]: I1203 20:06:20.396939 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" podStartSLOduration=2.357471553 podStartE2EDuration="5.396916722s" podCreationTimestamp="2025-12-03 20:06:15 +0000 UTC" firstStartedPulling="2025-12-03 20:06:16.199058609 +0000 UTC m=+2192.161868875" lastFinishedPulling="2025-12-03 20:06:19.238503778 +0000 UTC m=+2195.201314044" observedRunningTime="2025-12-03 20:06:20.394924029 +0000 UTC m=+2196.357734335" watchObservedRunningTime="2025-12-03 20:06:20.396916722 +0000 UTC m=+2196.359727028" Dec 03 20:06:27 crc kubenswrapper[4916]: I1203 20:06:27.478525 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:06:27 crc kubenswrapper[4916]: E1203 20:06:27.479762 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:06:41 crc kubenswrapper[4916]: I1203 20:06:41.477940 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:06:41 crc kubenswrapper[4916]: E1203 20:06:41.478927 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:06:54 crc kubenswrapper[4916]: I1203 20:06:54.487669 4916 scope.go:117] 
"RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:06:54 crc kubenswrapper[4916]: E1203 20:06:54.489911 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:07:05 crc kubenswrapper[4916]: I1203 20:07:05.479021 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:07:05 crc kubenswrapper[4916]: E1203 20:07:05.480061 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:07:14 crc kubenswrapper[4916]: I1203 20:07:14.000937 4916 generic.go:334] "Generic (PLEG): container finished" podID="257d92ae-6326-4650-830c-b29ed36146e7" containerID="58e0a966e8c6753502f9c31613c44157c96f547ffa7efc888463235320110e35" exitCode=0 Dec 03 20:07:14 crc kubenswrapper[4916]: I1203 20:07:14.001053 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" event={"ID":"257d92ae-6326-4650-830c-b29ed36146e7","Type":"ContainerDied","Data":"58e0a966e8c6753502f9c31613c44157c96f547ffa7efc888463235320110e35"} Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.507637 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.558029 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-nova-metadata-neutron-config-0\") pod \"257d92ae-6326-4650-830c-b29ed36146e7\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.558122 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-metadata-combined-ca-bundle\") pod \"257d92ae-6326-4650-830c-b29ed36146e7\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.558185 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-inventory\") pod \"257d92ae-6326-4650-830c-b29ed36146e7\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.558211 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mx9kh\" (UniqueName: \"kubernetes.io/projected/257d92ae-6326-4650-830c-b29ed36146e7-kube-api-access-mx9kh\") pod \"257d92ae-6326-4650-830c-b29ed36146e7\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.558282 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-ovn-metadata-agent-neutron-config-0\") pod \"257d92ae-6326-4650-830c-b29ed36146e7\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.558414 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-ssh-key\") pod \"257d92ae-6326-4650-830c-b29ed36146e7\" (UID: \"257d92ae-6326-4650-830c-b29ed36146e7\") " Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.578513 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "257d92ae-6326-4650-830c-b29ed36146e7" (UID: "257d92ae-6326-4650-830c-b29ed36146e7"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.587492 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/257d92ae-6326-4650-830c-b29ed36146e7-kube-api-access-mx9kh" (OuterVolumeSpecName: "kube-api-access-mx9kh") pod "257d92ae-6326-4650-830c-b29ed36146e7" (UID: "257d92ae-6326-4650-830c-b29ed36146e7"). InnerVolumeSpecName "kube-api-access-mx9kh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.624557 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "257d92ae-6326-4650-830c-b29ed36146e7" (UID: "257d92ae-6326-4650-830c-b29ed36146e7"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.624598 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "257d92ae-6326-4650-830c-b29ed36146e7" (UID: "257d92ae-6326-4650-830c-b29ed36146e7"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.625792 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "257d92ae-6326-4650-830c-b29ed36146e7" (UID: "257d92ae-6326-4650-830c-b29ed36146e7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.626136 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-inventory" (OuterVolumeSpecName: "inventory") pod "257d92ae-6326-4650-830c-b29ed36146e7" (UID: "257d92ae-6326-4650-830c-b29ed36146e7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.662948 4916 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.663024 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.663042 4916 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.663058 4916 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.663075 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mx9kh\" (UniqueName: \"kubernetes.io/projected/257d92ae-6326-4650-830c-b29ed36146e7-kube-api-access-mx9kh\") on node \"crc\" DevicePath \"\"" Dec 03 20:07:15 crc kubenswrapper[4916]: I1203 20:07:15.663090 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/257d92ae-6326-4650-830c-b29ed36146e7-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.032911 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" event={"ID":"257d92ae-6326-4650-830c-b29ed36146e7","Type":"ContainerDied","Data":"9df92468d9f87bbc80d79ff80482c8b4b313e2b54ca539fed1dc4178c653b4cb"} Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.032989 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9df92468d9f87bbc80d79ff80482c8b4b313e2b54ca539fed1dc4178c653b4cb" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.033036 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.274040 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8"] Dec 03 20:07:16 crc kubenswrapper[4916]: E1203 20:07:16.274854 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="257d92ae-6326-4650-830c-b29ed36146e7" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.274903 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="257d92ae-6326-4650-830c-b29ed36146e7" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.275474 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="257d92ae-6326-4650-830c-b29ed36146e7" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.276984 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.280910 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.281091 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.281323 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.281345 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.285646 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.287750 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8"] Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.386427 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.386552 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.386639 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrs6m\" (UniqueName: \"kubernetes.io/projected/6c68c375-3a19-46dc-8d30-dd8f6edf361e-kube-api-access-mrs6m\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.386782 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.386837 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.490874 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-mrs6m\" (UniqueName: \"kubernetes.io/projected/6c68c375-3a19-46dc-8d30-dd8f6edf361e-kube-api-access-mrs6m\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.491005 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.491069 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.491146 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.491171 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.497166 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.497218 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.499546 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.502227 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-ssh-key\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.514176 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrs6m\" (UniqueName: \"kubernetes.io/projected/6c68c375-3a19-46dc-8d30-dd8f6edf361e-kube-api-access-mrs6m\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:16 crc kubenswrapper[4916]: I1203 20:07:16.609627 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:07:17 crc kubenswrapper[4916]: W1203 20:07:17.235719 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c68c375_3a19_46dc_8d30_dd8f6edf361e.slice/crio-9af9d6c0d5b66539e7b883689ad4010e1cad5c32d5b69b5f732dbabdf190895e WatchSource:0}: Error finding container 9af9d6c0d5b66539e7b883689ad4010e1cad5c32d5b69b5f732dbabdf190895e: Status 404 returned error can't find the container with id 9af9d6c0d5b66539e7b883689ad4010e1cad5c32d5b69b5f732dbabdf190895e Dec 03 20:07:17 crc kubenswrapper[4916]: I1203 20:07:17.236855 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8"] Dec 03 20:07:18 crc kubenswrapper[4916]: I1203 20:07:18.055206 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" event={"ID":"6c68c375-3a19-46dc-8d30-dd8f6edf361e","Type":"ContainerStarted","Data":"8b69fa119d5d733bee2b0abd2dcff711a13efc76b09fb047c24433919db6695c"} Dec 03 20:07:18 crc kubenswrapper[4916]: I1203 20:07:18.055515 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" event={"ID":"6c68c375-3a19-46dc-8d30-dd8f6edf361e","Type":"ContainerStarted","Data":"9af9d6c0d5b66539e7b883689ad4010e1cad5c32d5b69b5f732dbabdf190895e"} Dec 03 20:07:18 crc kubenswrapper[4916]: I1203 20:07:18.076236 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" podStartSLOduration=1.610690692 podStartE2EDuration="2.076221313s" podCreationTimestamp="2025-12-03 20:07:16 +0000 UTC" firstStartedPulling="2025-12-03 20:07:17.239159684 +0000 UTC m=+2253.201969980" lastFinishedPulling="2025-12-03 20:07:17.704690295 +0000 UTC m=+2253.667500601" observedRunningTime="2025-12-03 20:07:18.071274911 +0000 UTC m=+2254.034085187" watchObservedRunningTime="2025-12-03 20:07:18.076221313 +0000 UTC m=+2254.039031589" Dec 03 20:07:19 crc kubenswrapper[4916]: I1203 20:07:19.478147 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:07:19 crc kubenswrapper[4916]: E1203 20:07:19.478843 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:07:34 crc 
kubenswrapper[4916]: I1203 20:07:34.486475 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:07:34 crc kubenswrapper[4916]: E1203 20:07:34.487309 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:07:46 crc kubenswrapper[4916]: I1203 20:07:46.478904 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:07:46 crc kubenswrapper[4916]: E1203 20:07:46.479920 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:07:57 crc kubenswrapper[4916]: I1203 20:07:57.478227 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:07:57 crc kubenswrapper[4916]: E1203 20:07:57.479264 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:08:11 crc kubenswrapper[4916]: I1203 20:08:11.478820 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:08:11 crc kubenswrapper[4916]: E1203 20:08:11.480547 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:08:25 crc kubenswrapper[4916]: I1203 20:08:25.478309 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:08:25 crc kubenswrapper[4916]: E1203 20:08:25.479218 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:08:36 crc kubenswrapper[4916]: I1203 20:08:36.478800 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:08:36 crc 
kubenswrapper[4916]: E1203 20:08:36.479771 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:08:51 crc kubenswrapper[4916]: I1203 20:08:51.478847 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:08:51 crc kubenswrapper[4916]: E1203 20:08:51.479944 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:09:05 crc kubenswrapper[4916]: I1203 20:09:05.478043 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:09:05 crc kubenswrapper[4916]: E1203 20:09:05.479296 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:09:20 crc kubenswrapper[4916]: I1203 20:09:20.479445 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:09:20 crc kubenswrapper[4916]: E1203 20:09:20.480915 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:09:32 crc kubenswrapper[4916]: I1203 20:09:32.478702 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:09:32 crc kubenswrapper[4916]: E1203 20:09:32.480282 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:09:46 crc kubenswrapper[4916]: I1203 20:09:46.478069 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:09:46 crc kubenswrapper[4916]: E1203 20:09:46.478877 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:09:59 crc kubenswrapper[4916]: I1203 20:09:59.478547 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:09:59 crc kubenswrapper[4916]: E1203 20:09:59.479234 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:10:12 crc kubenswrapper[4916]: I1203 20:10:12.477832 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:10:12 crc kubenswrapper[4916]: E1203 20:10:12.478771 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:10:27 crc kubenswrapper[4916]: I1203 20:10:27.478048 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:10:27 crc kubenswrapper[4916]: E1203 20:10:27.478867 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:10:38 crc kubenswrapper[4916]: I1203 20:10:38.478652 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:10:38 crc kubenswrapper[4916]: E1203 20:10:38.479440 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:10:51 crc kubenswrapper[4916]: I1203 20:10:51.480002 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:10:51 crc kubenswrapper[4916]: E1203 20:10:51.481446 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:11:02 crc kubenswrapper[4916]: I1203 20:11:02.478995 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:11:02 crc kubenswrapper[4916]: E1203 20:11:02.480164 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:11:15 crc kubenswrapper[4916]: I1203 20:11:15.478359 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:11:15 crc kubenswrapper[4916]: E1203 20:11:15.479648 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:11:30 crc kubenswrapper[4916]: I1203 20:11:30.478594 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab" Dec 03 20:11:30 crc kubenswrapper[4916]: I1203 20:11:30.834077 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"15d5846154948d1852e6ddde861ea5852f573e82e62ac7605b118d9662ac07ae"} Dec 03 20:11:59 crc kubenswrapper[4916]: I1203 20:11:59.142968 4916 generic.go:334] "Generic (PLEG): container finished" podID="6c68c375-3a19-46dc-8d30-dd8f6edf361e" containerID="8b69fa119d5d733bee2b0abd2dcff711a13efc76b09fb047c24433919db6695c" exitCode=0 Dec 03 20:11:59 crc kubenswrapper[4916]: I1203 20:11:59.143792 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" event={"ID":"6c68c375-3a19-46dc-8d30-dd8f6edf361e","Type":"ContainerDied","Data":"8b69fa119d5d733bee2b0abd2dcff711a13efc76b09fb047c24433919db6695c"} Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.562784 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.700780 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-combined-ca-bundle\") pod \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.700892 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-secret-0\") pod \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.700918 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrs6m\" (UniqueName: \"kubernetes.io/projected/6c68c375-3a19-46dc-8d30-dd8f6edf361e-kube-api-access-mrs6m\") pod \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.701041 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-ssh-key\") pod \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.701102 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-inventory\") pod \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\" (UID: \"6c68c375-3a19-46dc-8d30-dd8f6edf361e\") " Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.712138 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "6c68c375-3a19-46dc-8d30-dd8f6edf361e" (UID: "6c68c375-3a19-46dc-8d30-dd8f6edf361e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.713164 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c68c375-3a19-46dc-8d30-dd8f6edf361e-kube-api-access-mrs6m" (OuterVolumeSpecName: "kube-api-access-mrs6m") pod "6c68c375-3a19-46dc-8d30-dd8f6edf361e" (UID: "6c68c375-3a19-46dc-8d30-dd8f6edf361e"). InnerVolumeSpecName "kube-api-access-mrs6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.741142 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "6c68c375-3a19-46dc-8d30-dd8f6edf361e" (UID: "6c68c375-3a19-46dc-8d30-dd8f6edf361e"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.746858 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6c68c375-3a19-46dc-8d30-dd8f6edf361e" (UID: "6c68c375-3a19-46dc-8d30-dd8f6edf361e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.767166 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-inventory" (OuterVolumeSpecName: "inventory") pod "6c68c375-3a19-46dc-8d30-dd8f6edf361e" (UID: "6c68c375-3a19-46dc-8d30-dd8f6edf361e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.804270 4916 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.804711 4916 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.804810 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrs6m\" (UniqueName: \"kubernetes.io/projected/6c68c375-3a19-46dc-8d30-dd8f6edf361e-kube-api-access-mrs6m\") on node \"crc\" DevicePath \"\"" Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.804887 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 20:12:00 crc kubenswrapper[4916]: I1203 20:12:00.804963 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6c68c375-3a19-46dc-8d30-dd8f6edf361e-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.169154 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" event={"ID":"6c68c375-3a19-46dc-8d30-dd8f6edf361e","Type":"ContainerDied","Data":"9af9d6c0d5b66539e7b883689ad4010e1cad5c32d5b69b5f732dbabdf190895e"} Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.169207 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9af9d6c0d5b66539e7b883689ad4010e1cad5c32d5b69b5f732dbabdf190895e" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.169247 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.289667 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7"] Dec 03 20:12:01 crc kubenswrapper[4916]: E1203 20:12:01.290136 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c68c375-3a19-46dc-8d30-dd8f6edf361e" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.290152 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c68c375-3a19-46dc-8d30-dd8f6edf361e" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.290401 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c68c375-3a19-46dc-8d30-dd8f6edf361e" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.291202 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.295525 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.295717 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.295799 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.295828 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.295925 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.296034 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.296035 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.302150 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7"] Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.415451 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.416079 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.416174 4916 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.416205 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.416240 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm8gk\" (UniqueName: \"kubernetes.io/projected/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-kube-api-access-fm8gk\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.416274 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.416307 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.416402 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.416496 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.518598 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 
crc kubenswrapper[4916]: I1203 20:12:01.518715 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.518757 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.518989 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm8gk\" (UniqueName: \"kubernetes.io/projected/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-kube-api-access-fm8gk\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.519675 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.520388 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.520444 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.520512 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.521022 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.524216 4916 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.525877 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.525960 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.526078 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.527921 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.528368 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.530412 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.535166 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.550899 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm8gk\" (UniqueName: 
\"kubernetes.io/projected/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-kube-api-access-fm8gk\") pod \"nova-edpm-deployment-openstack-edpm-ipam-zk7x7\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:01 crc kubenswrapper[4916]: I1203 20:12:01.614841 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" Dec 03 20:12:02 crc kubenswrapper[4916]: I1203 20:12:02.205520 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7"] Dec 03 20:12:02 crc kubenswrapper[4916]: I1203 20:12:02.220297 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 20:12:03 crc kubenswrapper[4916]: I1203 20:12:03.204440 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" event={"ID":"8eaccb2f-783d-4da3-90ae-c88fdfef6c86","Type":"ContainerStarted","Data":"6df476009c2575840ac82b200a3419825f1f478b66dc3e9e75e0140d13a16b5b"} Dec 03 20:12:05 crc kubenswrapper[4916]: I1203 20:12:05.228314 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" event={"ID":"8eaccb2f-783d-4da3-90ae-c88fdfef6c86","Type":"ContainerStarted","Data":"02b9993ea8b22e9566cfcb405d06cb71d6be512c838e63a0188dd49d96c4a5ef"} Dec 03 20:12:05 crc kubenswrapper[4916]: I1203 20:12:05.268657 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" podStartSLOduration=2.039067965 podStartE2EDuration="4.268631337s" podCreationTimestamp="2025-12-03 20:12:01 +0000 UTC" firstStartedPulling="2025-12-03 20:12:02.220061333 +0000 UTC m=+2538.182871599" lastFinishedPulling="2025-12-03 20:12:04.449624705 +0000 UTC m=+2540.412434971" observedRunningTime="2025-12-03 20:12:05.250382723 +0000 UTC m=+2541.213193059" watchObservedRunningTime="2025-12-03 20:12:05.268631337 +0000 UTC m=+2541.231441633" Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.420933 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vntf5"] Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.423519 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.433692 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-utilities\") pod \"redhat-operators-vntf5\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") " pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.433968 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbzkq\" (UniqueName: \"kubernetes.io/projected/732e9196-4820-4603-ab8f-7701bfed4e7b-kube-api-access-xbzkq\") pod \"redhat-operators-vntf5\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") " pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.434074 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-catalog-content\") pod \"redhat-operators-vntf5\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") " pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.443792 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vntf5"]
Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.536224 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-catalog-content\") pod \"redhat-operators-vntf5\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") " pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.536405 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-utilities\") pod \"redhat-operators-vntf5\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") " pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.536534 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbzkq\" (UniqueName: \"kubernetes.io/projected/732e9196-4820-4603-ab8f-7701bfed4e7b-kube-api-access-xbzkq\") pod \"redhat-operators-vntf5\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") " pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.536984 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-utilities\") pod \"redhat-operators-vntf5\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") " pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.537476 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-catalog-content\") pod \"redhat-operators-vntf5\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") " pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.556315 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbzkq\" (UniqueName: \"kubernetes.io/projected/732e9196-4820-4603-ab8f-7701bfed4e7b-kube-api-access-xbzkq\") pod \"redhat-operators-vntf5\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") " pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:13 crc kubenswrapper[4916]: I1203 20:13:13.788384 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:14 crc kubenswrapper[4916]: I1203 20:13:14.254936 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vntf5"]
Dec 03 20:13:15 crc kubenswrapper[4916]: I1203 20:13:15.046197 4916 generic.go:334] "Generic (PLEG): container finished" podID="732e9196-4820-4603-ab8f-7701bfed4e7b" containerID="0fbb95a66152d636b9a5b63002d954d2e365656314c1e868b6154843d1ca97dc" exitCode=0
Dec 03 20:13:15 crc kubenswrapper[4916]: I1203 20:13:15.046249 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vntf5" event={"ID":"732e9196-4820-4603-ab8f-7701bfed4e7b","Type":"ContainerDied","Data":"0fbb95a66152d636b9a5b63002d954d2e365656314c1e868b6154843d1ca97dc"}
Dec 03 20:13:15 crc kubenswrapper[4916]: I1203 20:13:15.046528 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vntf5" event={"ID":"732e9196-4820-4603-ab8f-7701bfed4e7b","Type":"ContainerStarted","Data":"8497c24562b76b84225a675a4b3e20cdedbf61f9c6278e217f4f807979088a33"}
Dec 03 20:13:16 crc kubenswrapper[4916]: E1203 20:13:16.804703 4916 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod732e9196_4820_4603_ab8f_7701bfed4e7b.slice/crio-conmon-65e401960199afd27a8b14a9db3c2a3bae98bc3f8b6e4137b4176798f9b56208.scope\": RecentStats: unable to find data in memory cache]"
Dec 03 20:13:17 crc kubenswrapper[4916]: I1203 20:13:17.065232 4916 generic.go:334] "Generic (PLEG): container finished" podID="732e9196-4820-4603-ab8f-7701bfed4e7b" containerID="65e401960199afd27a8b14a9db3c2a3bae98bc3f8b6e4137b4176798f9b56208" exitCode=0
Dec 03 20:13:17 crc kubenswrapper[4916]: I1203 20:13:17.065281 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vntf5" event={"ID":"732e9196-4820-4603-ab8f-7701bfed4e7b","Type":"ContainerDied","Data":"65e401960199afd27a8b14a9db3c2a3bae98bc3f8b6e4137b4176798f9b56208"}
Dec 03 20:13:19 crc kubenswrapper[4916]: I1203 20:13:19.087748 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vntf5" event={"ID":"732e9196-4820-4603-ab8f-7701bfed4e7b","Type":"ContainerStarted","Data":"87cb8e09a7ddf049c22f526d568abecbf82b5fc6c1a90c580be4b6edb5e20d3f"}
Dec 03 20:13:19 crc kubenswrapper[4916]: I1203 20:13:19.118406 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vntf5" podStartSLOduration=3.423584847 podStartE2EDuration="6.118383344s" podCreationTimestamp="2025-12-03 20:13:13 +0000 UTC" firstStartedPulling="2025-12-03 20:13:15.047829261 +0000 UTC m=+2611.010639537" lastFinishedPulling="2025-12-03 20:13:17.742627768 +0000 UTC m=+2613.705438034" observedRunningTime="2025-12-03 20:13:19.115112007 +0000 UTC m=+2615.077922303" watchObservedRunningTime="2025-12-03 20:13:19.118383344 +0000 UTC m=+2615.081193620"
Dec 03 20:13:23 crc kubenswrapper[4916]: I1203 20:13:23.789069 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:23 crc kubenswrapper[4916]: I1203 20:13:23.791410 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:23 crc kubenswrapper[4916]: I1203 20:13:23.875289 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:24 crc kubenswrapper[4916]: I1203 20:13:24.198641 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:24 crc kubenswrapper[4916]: I1203 20:13:24.250465 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vntf5"]
Dec 03 20:13:26 crc kubenswrapper[4916]: I1203 20:13:26.155029 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vntf5" podUID="732e9196-4820-4603-ab8f-7701bfed4e7b" containerName="registry-server" containerID="cri-o://87cb8e09a7ddf049c22f526d568abecbf82b5fc6c1a90c580be4b6edb5e20d3f" gracePeriod=2
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.182407 4916 generic.go:334] "Generic (PLEG): container finished" podID="732e9196-4820-4603-ab8f-7701bfed4e7b" containerID="87cb8e09a7ddf049c22f526d568abecbf82b5fc6c1a90c580be4b6edb5e20d3f" exitCode=0
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.182442 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vntf5" event={"ID":"732e9196-4820-4603-ab8f-7701bfed4e7b","Type":"ContainerDied","Data":"87cb8e09a7ddf049c22f526d568abecbf82b5fc6c1a90c580be4b6edb5e20d3f"}
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.389213 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.505507 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbzkq\" (UniqueName: \"kubernetes.io/projected/732e9196-4820-4603-ab8f-7701bfed4e7b-kube-api-access-xbzkq\") pod \"732e9196-4820-4603-ab8f-7701bfed4e7b\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") "
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.505685 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-catalog-content\") pod \"732e9196-4820-4603-ab8f-7701bfed4e7b\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") "
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.505977 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-utilities\") pod \"732e9196-4820-4603-ab8f-7701bfed4e7b\" (UID: \"732e9196-4820-4603-ab8f-7701bfed4e7b\") "
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.508478 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-utilities" (OuterVolumeSpecName: "utilities") pod "732e9196-4820-4603-ab8f-7701bfed4e7b" (UID: "732e9196-4820-4603-ab8f-7701bfed4e7b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.519036 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/732e9196-4820-4603-ab8f-7701bfed4e7b-kube-api-access-xbzkq" (OuterVolumeSpecName: "kube-api-access-xbzkq") pod "732e9196-4820-4603-ab8f-7701bfed4e7b" (UID: "732e9196-4820-4603-ab8f-7701bfed4e7b"). InnerVolumeSpecName "kube-api-access-xbzkq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.609126 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.609163 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbzkq\" (UniqueName: \"kubernetes.io/projected/732e9196-4820-4603-ab8f-7701bfed4e7b-kube-api-access-xbzkq\") on node \"crc\" DevicePath \"\""
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.634732 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "732e9196-4820-4603-ab8f-7701bfed4e7b" (UID: "732e9196-4820-4603-ab8f-7701bfed4e7b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:13:29 crc kubenswrapper[4916]: I1203 20:13:29.711350 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/732e9196-4820-4603-ab8f-7701bfed4e7b-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 20:13:30 crc kubenswrapper[4916]: I1203 20:13:30.196907 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vntf5" event={"ID":"732e9196-4820-4603-ab8f-7701bfed4e7b","Type":"ContainerDied","Data":"8497c24562b76b84225a675a4b3e20cdedbf61f9c6278e217f4f807979088a33"}
Dec 03 20:13:30 crc kubenswrapper[4916]: I1203 20:13:30.197288 4916 scope.go:117] "RemoveContainer" containerID="87cb8e09a7ddf049c22f526d568abecbf82b5fc6c1a90c580be4b6edb5e20d3f"
Dec 03 20:13:30 crc kubenswrapper[4916]: I1203 20:13:30.197012 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vntf5"
Dec 03 20:13:30 crc kubenswrapper[4916]: I1203 20:13:30.239753 4916 scope.go:117] "RemoveContainer" containerID="65e401960199afd27a8b14a9db3c2a3bae98bc3f8b6e4137b4176798f9b56208"
Dec 03 20:13:30 crc kubenswrapper[4916]: I1203 20:13:30.240847 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vntf5"]
Dec 03 20:13:30 crc kubenswrapper[4916]: I1203 20:13:30.252216 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vntf5"]
Dec 03 20:13:30 crc kubenswrapper[4916]: I1203 20:13:30.287191 4916 scope.go:117] "RemoveContainer" containerID="0fbb95a66152d636b9a5b63002d954d2e365656314c1e868b6154843d1ca97dc"
Dec 03 20:13:30 crc kubenswrapper[4916]: I1203 20:13:30.499150 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="732e9196-4820-4603-ab8f-7701bfed4e7b" path="/var/lib/kubelet/pods/732e9196-4820-4603-ab8f-7701bfed4e7b/volumes"
Dec 03 20:13:46 crc kubenswrapper[4916]: I1203 20:13:46.163462 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 20:13:46 crc kubenswrapper[4916]: I1203 20:13:46.164176 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 20:14:16 crc kubenswrapper[4916]: I1203 20:14:16.159111 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 20:14:16 crc kubenswrapper[4916]: I1203 20:14:16.159657 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 20:14:46 crc kubenswrapper[4916]: I1203 20:14:46.159208 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 20:14:46 crc kubenswrapper[4916]: I1203 20:14:46.159954 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 20:14:46 crc kubenswrapper[4916]: I1203 20:14:46.160009 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms"
Dec 03 20:14:46 crc kubenswrapper[4916]: I1203 20:14:46.161071 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"15d5846154948d1852e6ddde861ea5852f573e82e62ac7605b118d9662ac07ae"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 20:14:46 crc kubenswrapper[4916]: I1203 20:14:46.161191 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://15d5846154948d1852e6ddde861ea5852f573e82e62ac7605b118d9662ac07ae" gracePeriod=600
Dec 03 20:14:47 crc kubenswrapper[4916]: I1203 20:14:47.033789 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="15d5846154948d1852e6ddde861ea5852f573e82e62ac7605b118d9662ac07ae" exitCode=0
Dec 03 20:14:47 crc kubenswrapper[4916]: I1203 20:14:47.033834 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"15d5846154948d1852e6ddde861ea5852f573e82e62ac7605b118d9662ac07ae"}
Dec 03 20:14:47 crc kubenswrapper[4916]: I1203 20:14:47.034371 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"}
Dec 03 20:14:47 crc kubenswrapper[4916]: I1203 20:14:47.034398 4916 scope.go:117] "RemoveContainer" containerID="64f74a1b0147f2704ef9cffb298d8bb8dd3240462e95b656cd16055a7ec60dab"
Dec 03 20:14:59 crc kubenswrapper[4916]: I1203 20:14:59.176798 4916 generic.go:334] "Generic (PLEG): container finished" podID="8eaccb2f-783d-4da3-90ae-c88fdfef6c86" containerID="02b9993ea8b22e9566cfcb405d06cb71d6be512c838e63a0188dd49d96c4a5ef" exitCode=0
Dec 03 20:14:59 crc kubenswrapper[4916]: I1203 20:14:59.176888 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" event={"ID":"8eaccb2f-783d-4da3-90ae-c88fdfef6c86","Type":"ContainerDied","Data":"02b9993ea8b22e9566cfcb405d06cb71d6be512c838e63a0188dd49d96c4a5ef"}
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.148850 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"]
Dec 03 20:15:00 crc kubenswrapper[4916]: E1203 20:15:00.149230 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="732e9196-4820-4603-ab8f-7701bfed4e7b" containerName="registry-server"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.149244 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="732e9196-4820-4603-ab8f-7701bfed4e7b" containerName="registry-server"
Dec 03 20:15:00 crc kubenswrapper[4916]: E1203 20:15:00.149257 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="732e9196-4820-4603-ab8f-7701bfed4e7b" containerName="extract-utilities"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.149264 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="732e9196-4820-4603-ab8f-7701bfed4e7b" containerName="extract-utilities"
Dec 03 20:15:00 crc kubenswrapper[4916]: E1203 20:15:00.149304 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="732e9196-4820-4603-ab8f-7701bfed4e7b" containerName="extract-content"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.149311 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="732e9196-4820-4603-ab8f-7701bfed4e7b" containerName="extract-content"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.149486 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="732e9196-4820-4603-ab8f-7701bfed4e7b" containerName="registry-server"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.150095 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.152239 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.152945 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.168913 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"]
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.287960 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqxvb\" (UniqueName: \"kubernetes.io/projected/0ad6c394-09fd-4742-858a-7f52086d5707-kube-api-access-sqxvb\") pod \"collect-profiles-29413215-pblkq\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.288431 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ad6c394-09fd-4742-858a-7f52086d5707-config-volume\") pod \"collect-profiles-29413215-pblkq\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.288463 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ad6c394-09fd-4742-858a-7f52086d5707-secret-volume\") pod \"collect-profiles-29413215-pblkq\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.391175 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqxvb\" (UniqueName: \"kubernetes.io/projected/0ad6c394-09fd-4742-858a-7f52086d5707-kube-api-access-sqxvb\") pod \"collect-profiles-29413215-pblkq\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.391335 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ad6c394-09fd-4742-858a-7f52086d5707-config-volume\") pod \"collect-profiles-29413215-pblkq\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.391369 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ad6c394-09fd-4742-858a-7f52086d5707-secret-volume\") pod \"collect-profiles-29413215-pblkq\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.392780 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ad6c394-09fd-4742-858a-7f52086d5707-config-volume\") pod \"collect-profiles-29413215-pblkq\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.406573 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ad6c394-09fd-4742-858a-7f52086d5707-secret-volume\") pod \"collect-profiles-29413215-pblkq\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.419732 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqxvb\" (UniqueName: \"kubernetes.io/projected/0ad6c394-09fd-4742-858a-7f52086d5707-kube-api-access-sqxvb\") pod \"collect-profiles-29413215-pblkq\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.483999 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.651352 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7"
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.797952 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-1\") pod \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") "
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.798032 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-extra-config-0\") pod \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") "
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.798094 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-combined-ca-bundle\") pod \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") "
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.798128 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-0\") pod \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") "
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.798210 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-ssh-key\") pod \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") "
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.798344 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-0\") pod \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") "
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.798382 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fm8gk\" (UniqueName: \"kubernetes.io/projected/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-kube-api-access-fm8gk\") pod \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") "
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.798429 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-inventory\") pod \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") "
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.798453 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-1\") pod \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\" (UID: \"8eaccb2f-783d-4da3-90ae-c88fdfef6c86\") "
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.808943 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "8eaccb2f-783d-4da3-90ae-c88fdfef6c86" (UID: "8eaccb2f-783d-4da3-90ae-c88fdfef6c86"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.809017 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-kube-api-access-fm8gk" (OuterVolumeSpecName: "kube-api-access-fm8gk") pod "8eaccb2f-783d-4da3-90ae-c88fdfef6c86" (UID: "8eaccb2f-783d-4da3-90ae-c88fdfef6c86"). InnerVolumeSpecName "kube-api-access-fm8gk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.835808 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-inventory" (OuterVolumeSpecName: "inventory") pod "8eaccb2f-783d-4da3-90ae-c88fdfef6c86" (UID: "8eaccb2f-783d-4da3-90ae-c88fdfef6c86"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.835856 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "8eaccb2f-783d-4da3-90ae-c88fdfef6c86" (UID: "8eaccb2f-783d-4da3-90ae-c88fdfef6c86"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.837460 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "8eaccb2f-783d-4da3-90ae-c88fdfef6c86" (UID: "8eaccb2f-783d-4da3-90ae-c88fdfef6c86"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.841401 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "8eaccb2f-783d-4da3-90ae-c88fdfef6c86" (UID: "8eaccb2f-783d-4da3-90ae-c88fdfef6c86"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.843145 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "8eaccb2f-783d-4da3-90ae-c88fdfef6c86" (UID: "8eaccb2f-783d-4da3-90ae-c88fdfef6c86"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.847857 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "8eaccb2f-783d-4da3-90ae-c88fdfef6c86" (UID: "8eaccb2f-783d-4da3-90ae-c88fdfef6c86"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.853314 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8eaccb2f-783d-4da3-90ae-c88fdfef6c86" (UID: "8eaccb2f-783d-4da3-90ae-c88fdfef6c86"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.901241 4916 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.901288 4916 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.901304 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.901318 4916 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.901331 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fm8gk\" (UniqueName: \"kubernetes.io/projected/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-kube-api-access-fm8gk\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.901343 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-inventory\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.901354 4916 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.901365 4916 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.901377 4916 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/8eaccb2f-783d-4da3-90ae-c88fdfef6c86-nova-extra-config-0\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:00 crc kubenswrapper[4916]: I1203 20:15:00.990712 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"]
Dec 03 20:15:00 crc kubenswrapper[4916]: W1203 20:15:00.995358 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ad6c394_09fd_4742_858a_7f52086d5707.slice/crio-4d83d3dce6135a054c731a7865aadd955e971215421fa7b8223c73fd34fa56a8 WatchSource:0}: Error finding container 4d83d3dce6135a054c731a7865aadd955e971215421fa7b8223c73fd34fa56a8: Status 404 returned error can't find the container with id 4d83d3dce6135a054c731a7865aadd955e971215421fa7b8223c73fd34fa56a8
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.205005 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7" event={"ID":"8eaccb2f-783d-4da3-90ae-c88fdfef6c86","Type":"ContainerDied","Data":"6df476009c2575840ac82b200a3419825f1f478b66dc3e9e75e0140d13a16b5b"}
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.205054 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6df476009c2575840ac82b200a3419825f1f478b66dc3e9e75e0140d13a16b5b"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.205021 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-zk7x7"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.208284 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq" event={"ID":"0ad6c394-09fd-4742-858a-7f52086d5707","Type":"ContainerStarted","Data":"7e86d0f80c007edadfe22440fda8a703e8127f41554b66727114e0266eb1bd5e"}
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.208319 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq" event={"ID":"0ad6c394-09fd-4742-858a-7f52086d5707","Type":"ContainerStarted","Data":"4d83d3dce6135a054c731a7865aadd955e971215421fa7b8223c73fd34fa56a8"}
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.234111 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq" podStartSLOduration=1.2340823300000001 podStartE2EDuration="1.23408233s" podCreationTimestamp="2025-12-03 20:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:15:01.224911426 +0000 UTC m=+2717.187721702" watchObservedRunningTime="2025-12-03 20:15:01.23408233 +0000 UTC m=+2717.196892596"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.282058 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"]
Dec 03 20:15:01 crc kubenswrapper[4916]: E1203 20:15:01.282501 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eaccb2f-783d-4da3-90ae-c88fdfef6c86" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.282525 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eaccb2f-783d-4da3-90ae-c88fdfef6c86" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.282721 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="8eaccb2f-783d-4da3-90ae-c88fdfef6c86" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.283429 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.287069 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.287157 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.287320 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-vzsls"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.287558 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.287700 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.291307 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"]
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.308488 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.308550 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.308598 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5cbb\" (UniqueName: \"kubernetes.io/projected/531e7486-e849-4176-b8d7-b93e11082c0a-kube-api-access-d5cbb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.308626 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.308717 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.308751 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.308807 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.410008 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.410061 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.410093 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5cbb\" (UniqueName: \"kubernetes.io/projected/531e7486-e849-4176-b8d7-b93e11082c0a-kube-api-access-d5cbb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.410114 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.410201 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.410232 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.410268 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.416389 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.416398 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.417108 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.422939 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.424137 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.426191 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.434545 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5cbb\" (UniqueName: \"kubernetes.io/projected/531e7486-e849-4176-b8d7-b93e11082c0a-kube-api-access-d5cbb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:01 crc kubenswrapper[4916]: I1203 20:15:01.602727 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"
Dec 03 20:15:02 crc kubenswrapper[4916]: I1203 20:15:02.223681 4916 generic.go:334] "Generic (PLEG): container finished" podID="0ad6c394-09fd-4742-858a-7f52086d5707" containerID="7e86d0f80c007edadfe22440fda8a703e8127f41554b66727114e0266eb1bd5e" exitCode=0
Dec 03 20:15:02 crc kubenswrapper[4916]: I1203 20:15:02.223759 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq" event={"ID":"0ad6c394-09fd-4742-858a-7f52086d5707","Type":"ContainerDied","Data":"7e86d0f80c007edadfe22440fda8a703e8127f41554b66727114e0266eb1bd5e"}
Dec 03 20:15:02 crc kubenswrapper[4916]: I1203 20:15:02.230383 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p"]
Dec 03 20:15:03 crc kubenswrapper[4916]: I1203 20:15:03.233854 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p" event={"ID":"531e7486-e849-4176-b8d7-b93e11082c0a","Type":"ContainerStarted","Data":"f38040b8470da836da8615f53061d4391c39f3ead7551ce19e8e4401e504e661"}
Dec 03 20:15:04 crc kubenswrapper[4916]: I1203 20:15:04.486935 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:04 crc kubenswrapper[4916]: I1203 20:15:04.584124 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqxvb\" (UniqueName: \"kubernetes.io/projected/0ad6c394-09fd-4742-858a-7f52086d5707-kube-api-access-sqxvb\") pod \"0ad6c394-09fd-4742-858a-7f52086d5707\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") "
Dec 03 20:15:04 crc kubenswrapper[4916]: I1203 20:15:04.584168 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ad6c394-09fd-4742-858a-7f52086d5707-secret-volume\") pod \"0ad6c394-09fd-4742-858a-7f52086d5707\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") "
Dec 03 20:15:04 crc kubenswrapper[4916]: I1203 20:15:04.584264 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ad6c394-09fd-4742-858a-7f52086d5707-config-volume\") pod \"0ad6c394-09fd-4742-858a-7f52086d5707\" (UID: \"0ad6c394-09fd-4742-858a-7f52086d5707\") "
Dec 03 20:15:04 crc kubenswrapper[4916]: I1203 20:15:04.586232 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ad6c394-09fd-4742-858a-7f52086d5707-config-volume" (OuterVolumeSpecName: "config-volume") pod "0ad6c394-09fd-4742-858a-7f52086d5707" (UID: "0ad6c394-09fd-4742-858a-7f52086d5707"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 20:15:04 crc kubenswrapper[4916]: I1203 20:15:04.589853 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ad6c394-09fd-4742-858a-7f52086d5707-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0ad6c394-09fd-4742-858a-7f52086d5707" (UID: "0ad6c394-09fd-4742-858a-7f52086d5707"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 20:15:04 crc kubenswrapper[4916]: I1203 20:15:04.590455 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ad6c394-09fd-4742-858a-7f52086d5707-kube-api-access-sqxvb" (OuterVolumeSpecName: "kube-api-access-sqxvb") pod "0ad6c394-09fd-4742-858a-7f52086d5707" (UID: "0ad6c394-09fd-4742-858a-7f52086d5707"). InnerVolumeSpecName "kube-api-access-sqxvb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:15:04 crc kubenswrapper[4916]: I1203 20:15:04.686456 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqxvb\" (UniqueName: \"kubernetes.io/projected/0ad6c394-09fd-4742-858a-7f52086d5707-kube-api-access-sqxvb\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:04 crc kubenswrapper[4916]: I1203 20:15:04.686772 4916 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0ad6c394-09fd-4742-858a-7f52086d5707-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:04 crc kubenswrapper[4916]: I1203 20:15:04.686786 4916 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0ad6c394-09fd-4742-858a-7f52086d5707-config-volume\") on node \"crc\" DevicePath \"\""
Dec 03 20:15:05 crc kubenswrapper[4916]: I1203 20:15:05.253280 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p" event={"ID":"531e7486-e849-4176-b8d7-b93e11082c0a","Type":"ContainerStarted","Data":"edf0f052ed92c122a257b89cd62f5ca0ac356ae12ddc488e0cbfcee5abd53443"}
Dec 03 20:15:05 crc kubenswrapper[4916]: I1203 20:15:05.262624 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq" event={"ID":"0ad6c394-09fd-4742-858a-7f52086d5707","Type":"ContainerDied","Data":"4d83d3dce6135a054c731a7865aadd955e971215421fa7b8223c73fd34fa56a8"}
Dec 03 20:15:05 crc kubenswrapper[4916]: I1203 20:15:05.262711 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d83d3dce6135a054c731a7865aadd955e971215421fa7b8223c73fd34fa56a8"
Dec 03 20:15:05 crc kubenswrapper[4916]: I1203 20:15:05.262839 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413215-pblkq"
Dec 03 20:15:05 crc kubenswrapper[4916]: I1203 20:15:05.277917 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p" podStartSLOduration=2.337909265 podStartE2EDuration="4.277892643s" podCreationTimestamp="2025-12-03 20:15:01 +0000 UTC" firstStartedPulling="2025-12-03 20:15:02.274349373 +0000 UTC m=+2718.237159639" lastFinishedPulling="2025-12-03 20:15:04.214332751 +0000 UTC m=+2720.177143017" observedRunningTime="2025-12-03 20:15:05.276998579 +0000 UTC m=+2721.239808885" watchObservedRunningTime="2025-12-03 20:15:05.277892643 +0000 UTC m=+2721.240702919"
Dec 03 20:15:05 crc kubenswrapper[4916]: I1203 20:15:05.571891 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8"]
Dec 03 20:15:05 crc kubenswrapper[4916]: I1203 20:15:05.582225 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413170-n5jt8"]
Dec 03 20:15:06 crc kubenswrapper[4916]: I1203 20:15:06.490310 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ce6a756-7c72-45f6-abb8-96d9597b7429" path="/var/lib/kubelet/pods/4ce6a756-7c72-45f6-abb8-96d9597b7429/volumes"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.281504 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t95tr"]
Dec 03 20:15:21 crc kubenswrapper[4916]: E1203 20:15:21.282396 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ad6c394-09fd-4742-858a-7f52086d5707" containerName="collect-profiles"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.282408 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ad6c394-09fd-4742-858a-7f52086d5707" containerName="collect-profiles"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.282616 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ad6c394-09fd-4742-858a-7f52086d5707" containerName="collect-profiles"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.284247 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t95tr"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.307695 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t95tr"]
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.417943 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsjkk\" (UniqueName: \"kubernetes.io/projected/b7c1439a-76bc-4a28-84f8-3e0ac892a250-kube-api-access-vsjkk\") pod \"certified-operators-t95tr\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " pod="openshift-marketplace/certified-operators-t95tr"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.418036 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-catalog-content\") pod \"certified-operators-t95tr\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " pod="openshift-marketplace/certified-operators-t95tr"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.418106 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-utilities\") pod \"certified-operators-t95tr\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " pod="openshift-marketplace/certified-operators-t95tr"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.520284 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-catalog-content\") pod \"certified-operators-t95tr\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " pod="openshift-marketplace/certified-operators-t95tr"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.520461 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-utilities\") pod \"certified-operators-t95tr\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " pod="openshift-marketplace/certified-operators-t95tr"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.520653 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsjkk\" (UniqueName: \"kubernetes.io/projected/b7c1439a-76bc-4a28-84f8-3e0ac892a250-kube-api-access-vsjkk\") pod \"certified-operators-t95tr\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " pod="openshift-marketplace/certified-operators-t95tr"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.520987 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-catalog-content\") pod \"certified-operators-t95tr\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " pod="openshift-marketplace/certified-operators-t95tr"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.521081 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-utilities\") pod \"certified-operators-t95tr\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " pod="openshift-marketplace/certified-operators-t95tr"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.545644 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsjkk\" (UniqueName: \"kubernetes.io/projected/b7c1439a-76bc-4a28-84f8-3e0ac892a250-kube-api-access-vsjkk\") pod \"certified-operators-t95tr\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " pod="openshift-marketplace/certified-operators-t95tr"
Dec 03 20:15:21 crc kubenswrapper[4916]: I1203 20:15:21.659640 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t95tr"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.079861 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2tgdf"]
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.082161 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tgdf"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.092514 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2tgdf"]
Dec 03 20:15:22 crc kubenswrapper[4916]: W1203 20:15:22.147658 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7c1439a_76bc_4a28_84f8_3e0ac892a250.slice/crio-eae42d04bbe8ed86443c95bb4de433246778a447157e344d265ebd48d59b9760 WatchSource:0}: Error finding container eae42d04bbe8ed86443c95bb4de433246778a447157e344d265ebd48d59b9760: Status 404 returned error can't find the container with id eae42d04bbe8ed86443c95bb4de433246778a447157e344d265ebd48d59b9760
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.177101 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t95tr"]
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.232641 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-catalog-content\") pod \"community-operators-2tgdf\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " pod="openshift-marketplace/community-operators-2tgdf"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.232707 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmwwl\" (UniqueName: \"kubernetes.io/projected/d29840ec-58e3-4b69-90ce-ee18c452e917-kube-api-access-vmwwl\") pod \"community-operators-2tgdf\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " pod="openshift-marketplace/community-operators-2tgdf"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.232776 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-utilities\") pod \"community-operators-2tgdf\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " pod="openshift-marketplace/community-operators-2tgdf"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.334993 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-catalog-content\") pod \"community-operators-2tgdf\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " pod="openshift-marketplace/community-operators-2tgdf"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.335065 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmwwl\" (UniqueName: \"kubernetes.io/projected/d29840ec-58e3-4b69-90ce-ee18c452e917-kube-api-access-vmwwl\") pod \"community-operators-2tgdf\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " pod="openshift-marketplace/community-operators-2tgdf"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.335165 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-utilities\") pod \"community-operators-2tgdf\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " pod="openshift-marketplace/community-operators-2tgdf"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.335652 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-catalog-content\") pod \"community-operators-2tgdf\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " pod="openshift-marketplace/community-operators-2tgdf"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.335727 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-utilities\") pod \"community-operators-2tgdf\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " pod="openshift-marketplace/community-operators-2tgdf"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.366550 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmwwl\" (UniqueName: \"kubernetes.io/projected/d29840ec-58e3-4b69-90ce-ee18c452e917-kube-api-access-vmwwl\") pod \"community-operators-2tgdf\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " pod="openshift-marketplace/community-operators-2tgdf"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.408362 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tgdf"
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.426008 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95tr" event={"ID":"b7c1439a-76bc-4a28-84f8-3e0ac892a250","Type":"ContainerStarted","Data":"eae42d04bbe8ed86443c95bb4de433246778a447157e344d265ebd48d59b9760"}
Dec 03 20:15:22 crc kubenswrapper[4916]: I1203 20:15:22.966857 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2tgdf"]
Dec 03 20:15:23 crc kubenswrapper[4916]: I1203 20:15:23.435215 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tgdf" event={"ID":"d29840ec-58e3-4b69-90ce-ee18c452e917","Type":"ContainerStarted","Data":"f0d9d18e16622429707ad75c4dfaaf506daafb1dbb8fe16a597b8b6cb6fcf7a7"}
Dec 03 20:15:23 crc kubenswrapper[4916]: I1203 20:15:23.437431 4916 generic.go:334] "Generic (PLEG): container finished" podID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" containerID="b99a4a4c4e7a54c0336a8ada8812a5afdb9e6c84202d6a7cd00242f63db91490" exitCode=0
Dec 03 20:15:23 crc kubenswrapper[4916]: I1203 20:15:23.437472 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95tr" event={"ID":"b7c1439a-76bc-4a28-84f8-3e0ac892a250","Type":"ContainerDied","Data":"b99a4a4c4e7a54c0336a8ada8812a5afdb9e6c84202d6a7cd00242f63db91490"}
Dec 03 20:15:24 crc kubenswrapper[4916]: I1203 20:15:24.447232 4916 generic.go:334] "Generic (PLEG): container finished" podID="d29840ec-58e3-4b69-90ce-ee18c452e917" containerID="40bdf90ef5435b5538fe239ba014490bda83ac6f4f162b40cd25c268b1d2431a" exitCode=0
Dec 03 20:15:24 crc kubenswrapper[4916]: I1203 20:15:24.447487 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tgdf" event={"ID":"d29840ec-58e3-4b69-90ce-ee18c452e917","Type":"ContainerDied","Data":"40bdf90ef5435b5538fe239ba014490bda83ac6f4f162b40cd25c268b1d2431a"}
Dec 03 20:15:24 crc kubenswrapper[4916]: I1203 20:15:24.452987 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95tr" event={"ID":"b7c1439a-76bc-4a28-84f8-3e0ac892a250","Type":"ContainerStarted","Data":"3cf00d738b74923e741a773b820e0fa92b26e851da83a46dc7be882aae3537f0"}
Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.085530 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qzqbv"]
Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.087627 4916 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.105590 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qzqbv"] Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.143950 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dcdc\" (UniqueName: \"kubernetes.io/projected/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-kube-api-access-5dcdc\") pod \"redhat-marketplace-qzqbv\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.144104 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-catalog-content\") pod \"redhat-marketplace-qzqbv\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.144176 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-utilities\") pod \"redhat-marketplace-qzqbv\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.245732 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-catalog-content\") pod \"redhat-marketplace-qzqbv\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.245783 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-utilities\") pod \"redhat-marketplace-qzqbv\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.246097 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dcdc\" (UniqueName: \"kubernetes.io/projected/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-kube-api-access-5dcdc\") pod \"redhat-marketplace-qzqbv\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.246538 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-catalog-content\") pod \"redhat-marketplace-qzqbv\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.246679 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-utilities\") pod \"redhat-marketplace-qzqbv\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.276466 4916 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-5dcdc\" (UniqueName: \"kubernetes.io/projected/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-kube-api-access-5dcdc\") pod \"redhat-marketplace-qzqbv\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.457997 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.469768 4916 generic.go:334] "Generic (PLEG): container finished" podID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" containerID="3cf00d738b74923e741a773b820e0fa92b26e851da83a46dc7be882aae3537f0" exitCode=0 Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.469818 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95tr" event={"ID":"b7c1439a-76bc-4a28-84f8-3e0ac892a250","Type":"ContainerDied","Data":"3cf00d738b74923e741a773b820e0fa92b26e851da83a46dc7be882aae3537f0"} Dec 03 20:15:25 crc kubenswrapper[4916]: I1203 20:15:25.942632 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qzqbv"] Dec 03 20:15:25 crc kubenswrapper[4916]: W1203 20:15:25.946551 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1eb827e_9c0e_434a_be3d_8ef10b2313c1.slice/crio-0e91937e21dadcfa0f39a3ebf8a9d20e23934ac54558ad8794a349936c87cee7 WatchSource:0}: Error finding container 0e91937e21dadcfa0f39a3ebf8a9d20e23934ac54558ad8794a349936c87cee7: Status 404 returned error can't find the container with id 0e91937e21dadcfa0f39a3ebf8a9d20e23934ac54558ad8794a349936c87cee7 Dec 03 20:15:26 crc kubenswrapper[4916]: I1203 20:15:26.501301 4916 generic.go:334] "Generic (PLEG): container finished" podID="d29840ec-58e3-4b69-90ce-ee18c452e917" containerID="da5bcd21c64538982f24850109c4f169514d84018828543c398859ed1dc808f6" exitCode=0 Dec 03 20:15:26 crc kubenswrapper[4916]: I1203 20:15:26.502215 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95tr" event={"ID":"b7c1439a-76bc-4a28-84f8-3e0ac892a250","Type":"ContainerStarted","Data":"1d61875bfa0de8e17a428eaa3dd0d1ac4e6577dee71786c788b7050a290212ac"} Dec 03 20:15:26 crc kubenswrapper[4916]: I1203 20:15:26.502263 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tgdf" event={"ID":"d29840ec-58e3-4b69-90ce-ee18c452e917","Type":"ContainerDied","Data":"da5bcd21c64538982f24850109c4f169514d84018828543c398859ed1dc808f6"} Dec 03 20:15:26 crc kubenswrapper[4916]: I1203 20:15:26.504542 4916 generic.go:334] "Generic (PLEG): container finished" podID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerID="8b7296411df7f7a1613eeedb24af870dab599a139e6e1d3982b2b5f1b2a1174e" exitCode=0 Dec 03 20:15:26 crc kubenswrapper[4916]: I1203 20:15:26.504582 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qzqbv" event={"ID":"d1eb827e-9c0e-434a-be3d-8ef10b2313c1","Type":"ContainerDied","Data":"8b7296411df7f7a1613eeedb24af870dab599a139e6e1d3982b2b5f1b2a1174e"} Dec 03 20:15:26 crc kubenswrapper[4916]: I1203 20:15:26.504649 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qzqbv" 
event={"ID":"d1eb827e-9c0e-434a-be3d-8ef10b2313c1","Type":"ContainerStarted","Data":"0e91937e21dadcfa0f39a3ebf8a9d20e23934ac54558ad8794a349936c87cee7"} Dec 03 20:15:26 crc kubenswrapper[4916]: I1203 20:15:26.530425 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t95tr" podStartSLOduration=3.096689143 podStartE2EDuration="5.530403232s" podCreationTimestamp="2025-12-03 20:15:21 +0000 UTC" firstStartedPulling="2025-12-03 20:15:23.439169456 +0000 UTC m=+2739.401979722" lastFinishedPulling="2025-12-03 20:15:25.872883525 +0000 UTC m=+2741.835693811" observedRunningTime="2025-12-03 20:15:26.51975798 +0000 UTC m=+2742.482568256" watchObservedRunningTime="2025-12-03 20:15:26.530403232 +0000 UTC m=+2742.493213498" Dec 03 20:15:31 crc kubenswrapper[4916]: I1203 20:15:31.660688 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t95tr" Dec 03 20:15:31 crc kubenswrapper[4916]: I1203 20:15:31.661338 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t95tr" Dec 03 20:15:31 crc kubenswrapper[4916]: I1203 20:15:31.742396 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t95tr" Dec 03 20:15:32 crc kubenswrapper[4916]: I1203 20:15:32.697708 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t95tr" Dec 03 20:15:33 crc kubenswrapper[4916]: I1203 20:15:33.579168 4916 generic.go:334] "Generic (PLEG): container finished" podID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerID="5d372511a548b2944c09a9d53100106dc712d9349ad684faca7ebc20d97abfb4" exitCode=0 Dec 03 20:15:33 crc kubenswrapper[4916]: I1203 20:15:33.579244 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qzqbv" event={"ID":"d1eb827e-9c0e-434a-be3d-8ef10b2313c1","Type":"ContainerDied","Data":"5d372511a548b2944c09a9d53100106dc712d9349ad684faca7ebc20d97abfb4"} Dec 03 20:15:33 crc kubenswrapper[4916]: I1203 20:15:33.582261 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tgdf" event={"ID":"d29840ec-58e3-4b69-90ce-ee18c452e917","Type":"ContainerStarted","Data":"bcdc8c3f2e259a30789fc3bd23430d46e415477297454c1780671cfbc4e981de"} Dec 03 20:15:33 crc kubenswrapper[4916]: I1203 20:15:33.626947 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2tgdf" podStartSLOduration=9.132030545 podStartE2EDuration="11.626928108s" podCreationTimestamp="2025-12-03 20:15:22 +0000 UTC" firstStartedPulling="2025-12-03 20:15:24.449873985 +0000 UTC m=+2740.412684291" lastFinishedPulling="2025-12-03 20:15:26.944771568 +0000 UTC m=+2742.907581854" observedRunningTime="2025-12-03 20:15:33.620265361 +0000 UTC m=+2749.583075637" watchObservedRunningTime="2025-12-03 20:15:33.626928108 +0000 UTC m=+2749.589738364" Dec 03 20:15:33 crc kubenswrapper[4916]: I1203 20:15:33.670250 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t95tr"] Dec 03 20:15:34 crc kubenswrapper[4916]: I1203 20:15:34.593745 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qzqbv" 
event={"ID":"d1eb827e-9c0e-434a-be3d-8ef10b2313c1","Type":"ContainerStarted","Data":"8e28f93a31ba2ae0c2e7976e10fea1e4ff0a6458677c52da8a6b2eff3d7f1353"} Dec 03 20:15:34 crc kubenswrapper[4916]: I1203 20:15:34.594135 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t95tr" podUID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" containerName="registry-server" containerID="cri-o://1d61875bfa0de8e17a428eaa3dd0d1ac4e6577dee71786c788b7050a290212ac" gracePeriod=2 Dec 03 20:15:34 crc kubenswrapper[4916]: I1203 20:15:34.613302 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qzqbv" podStartSLOduration=1.92001566 podStartE2EDuration="9.613273671s" podCreationTimestamp="2025-12-03 20:15:25 +0000 UTC" firstStartedPulling="2025-12-03 20:15:26.506087037 +0000 UTC m=+2742.468897313" lastFinishedPulling="2025-12-03 20:15:34.199345038 +0000 UTC m=+2750.162155324" observedRunningTime="2025-12-03 20:15:34.613045635 +0000 UTC m=+2750.575855911" watchObservedRunningTime="2025-12-03 20:15:34.613273671 +0000 UTC m=+2750.576083977" Dec 03 20:15:35 crc kubenswrapper[4916]: I1203 20:15:35.461766 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:35 crc kubenswrapper[4916]: I1203 20:15:35.462277 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:36 crc kubenswrapper[4916]: I1203 20:15:36.528837 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-qzqbv" podUID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerName="registry-server" probeResult="failure" output=< Dec 03 20:15:36 crc kubenswrapper[4916]: timeout: failed to connect service ":50051" within 1s Dec 03 20:15:36 crc kubenswrapper[4916]: > Dec 03 20:15:36 crc kubenswrapper[4916]: I1203 20:15:36.616690 4916 generic.go:334] "Generic (PLEG): container finished" podID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" containerID="1d61875bfa0de8e17a428eaa3dd0d1ac4e6577dee71786c788b7050a290212ac" exitCode=0 Dec 03 20:15:36 crc kubenswrapper[4916]: I1203 20:15:36.616783 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95tr" event={"ID":"b7c1439a-76bc-4a28-84f8-3e0ac892a250","Type":"ContainerDied","Data":"1d61875bfa0de8e17a428eaa3dd0d1ac4e6577dee71786c788b7050a290212ac"} Dec 03 20:15:37 crc kubenswrapper[4916]: I1203 20:15:37.908886 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t95tr" Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.000803 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-catalog-content\") pod \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.000985 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-utilities\") pod \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.001059 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsjkk\" (UniqueName: \"kubernetes.io/projected/b7c1439a-76bc-4a28-84f8-3e0ac892a250-kube-api-access-vsjkk\") pod \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\" (UID: \"b7c1439a-76bc-4a28-84f8-3e0ac892a250\") " Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.001885 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-utilities" (OuterVolumeSpecName: "utilities") pod "b7c1439a-76bc-4a28-84f8-3e0ac892a250" (UID: "b7c1439a-76bc-4a28-84f8-3e0ac892a250"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.026840 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7c1439a-76bc-4a28-84f8-3e0ac892a250-kube-api-access-vsjkk" (OuterVolumeSpecName: "kube-api-access-vsjkk") pod "b7c1439a-76bc-4a28-84f8-3e0ac892a250" (UID: "b7c1439a-76bc-4a28-84f8-3e0ac892a250"). InnerVolumeSpecName "kube-api-access-vsjkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.052141 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7c1439a-76bc-4a28-84f8-3e0ac892a250" (UID: "b7c1439a-76bc-4a28-84f8-3e0ac892a250"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.102230 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.102271 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7c1439a-76bc-4a28-84f8-3e0ac892a250-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.102285 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsjkk\" (UniqueName: \"kubernetes.io/projected/b7c1439a-76bc-4a28-84f8-3e0ac892a250-kube-api-access-vsjkk\") on node \"crc\" DevicePath \"\"" Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.714235 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t95tr" event={"ID":"b7c1439a-76bc-4a28-84f8-3e0ac892a250","Type":"ContainerDied","Data":"eae42d04bbe8ed86443c95bb4de433246778a447157e344d265ebd48d59b9760"} Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.714303 4916 scope.go:117] "RemoveContainer" containerID="1d61875bfa0de8e17a428eaa3dd0d1ac4e6577dee71786c788b7050a290212ac" Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.714361 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t95tr" Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.740835 4916 scope.go:117] "RemoveContainer" containerID="3cf00d738b74923e741a773b820e0fa92b26e851da83a46dc7be882aae3537f0" Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.750585 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t95tr"] Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.767894 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t95tr"] Dec 03 20:15:38 crc kubenswrapper[4916]: I1203 20:15:38.779677 4916 scope.go:117] "RemoveContainer" containerID="b99a4a4c4e7a54c0336a8ada8812a5afdb9e6c84202d6a7cd00242f63db91490" Dec 03 20:15:40 crc kubenswrapper[4916]: I1203 20:15:40.492223 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" path="/var/lib/kubelet/pods/b7c1439a-76bc-4a28-84f8-3e0ac892a250/volumes" Dec 03 20:15:42 crc kubenswrapper[4916]: I1203 20:15:42.408872 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2tgdf" Dec 03 20:15:42 crc kubenswrapper[4916]: I1203 20:15:42.409257 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2tgdf" Dec 03 20:15:42 crc kubenswrapper[4916]: I1203 20:15:42.490139 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2tgdf" Dec 03 20:15:42 crc kubenswrapper[4916]: I1203 20:15:42.849670 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2tgdf" Dec 03 20:15:45 crc kubenswrapper[4916]: I1203 20:15:45.527164 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:45 crc kubenswrapper[4916]: I1203 20:15:45.586591 4916 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:46 crc kubenswrapper[4916]: I1203 20:15:46.875038 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2tgdf"] Dec 03 20:15:46 crc kubenswrapper[4916]: I1203 20:15:46.875935 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2tgdf" podUID="d29840ec-58e3-4b69-90ce-ee18c452e917" containerName="registry-server" containerID="cri-o://bcdc8c3f2e259a30789fc3bd23430d46e415477297454c1780671cfbc4e981de" gracePeriod=2 Dec 03 20:15:47 crc kubenswrapper[4916]: I1203 20:15:47.881650 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qzqbv"] Dec 03 20:15:47 crc kubenswrapper[4916]: I1203 20:15:47.882171 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qzqbv" podUID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerName="registry-server" containerID="cri-o://8e28f93a31ba2ae0c2e7976e10fea1e4ff0a6458677c52da8a6b2eff3d7f1353" gracePeriod=2 Dec 03 20:15:48 crc kubenswrapper[4916]: I1203 20:15:48.853405 4916 generic.go:334] "Generic (PLEG): container finished" podID="d29840ec-58e3-4b69-90ce-ee18c452e917" containerID="bcdc8c3f2e259a30789fc3bd23430d46e415477297454c1780671cfbc4e981de" exitCode=0 Dec 03 20:15:48 crc kubenswrapper[4916]: I1203 20:15:48.853466 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tgdf" event={"ID":"d29840ec-58e3-4b69-90ce-ee18c452e917","Type":"ContainerDied","Data":"bcdc8c3f2e259a30789fc3bd23430d46e415477297454c1780671cfbc4e981de"} Dec 03 20:15:48 crc kubenswrapper[4916]: I1203 20:15:48.855929 4916 generic.go:334] "Generic (PLEG): container finished" podID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerID="8e28f93a31ba2ae0c2e7976e10fea1e4ff0a6458677c52da8a6b2eff3d7f1353" exitCode=0 Dec 03 20:15:48 crc kubenswrapper[4916]: I1203 20:15:48.855977 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qzqbv" event={"ID":"d1eb827e-9c0e-434a-be3d-8ef10b2313c1","Type":"ContainerDied","Data":"8e28f93a31ba2ae0c2e7976e10fea1e4ff0a6458677c52da8a6b2eff3d7f1353"} Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.154589 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2tgdf" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.296545 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmwwl\" (UniqueName: \"kubernetes.io/projected/d29840ec-58e3-4b69-90ce-ee18c452e917-kube-api-access-vmwwl\") pod \"d29840ec-58e3-4b69-90ce-ee18c452e917\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.296690 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-catalog-content\") pod \"d29840ec-58e3-4b69-90ce-ee18c452e917\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.296733 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-utilities\") pod \"d29840ec-58e3-4b69-90ce-ee18c452e917\" (UID: \"d29840ec-58e3-4b69-90ce-ee18c452e917\") " Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.298260 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-utilities" (OuterVolumeSpecName: "utilities") pod "d29840ec-58e3-4b69-90ce-ee18c452e917" (UID: "d29840ec-58e3-4b69-90ce-ee18c452e917"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.308998 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d29840ec-58e3-4b69-90ce-ee18c452e917-kube-api-access-vmwwl" (OuterVolumeSpecName: "kube-api-access-vmwwl") pod "d29840ec-58e3-4b69-90ce-ee18c452e917" (UID: "d29840ec-58e3-4b69-90ce-ee18c452e917"). InnerVolumeSpecName "kube-api-access-vmwwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.351381 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d29840ec-58e3-4b69-90ce-ee18c452e917" (UID: "d29840ec-58e3-4b69-90ce-ee18c452e917"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.398855 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmwwl\" (UniqueName: \"kubernetes.io/projected/d29840ec-58e3-4b69-90ce-ee18c452e917-kube-api-access-vmwwl\") on node \"crc\" DevicePath \"\"" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.398892 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.398904 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d29840ec-58e3-4b69-90ce-ee18c452e917-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.866829 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qzqbv" event={"ID":"d1eb827e-9c0e-434a-be3d-8ef10b2313c1","Type":"ContainerDied","Data":"0e91937e21dadcfa0f39a3ebf8a9d20e23934ac54558ad8794a349936c87cee7"} Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.867236 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e91937e21dadcfa0f39a3ebf8a9d20e23934ac54558ad8794a349936c87cee7" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.869363 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tgdf" event={"ID":"d29840ec-58e3-4b69-90ce-ee18c452e917","Type":"ContainerDied","Data":"f0d9d18e16622429707ad75c4dfaaf506daafb1dbb8fe16a597b8b6cb6fcf7a7"} Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.869402 4916 scope.go:117] "RemoveContainer" containerID="bcdc8c3f2e259a30789fc3bd23430d46e415477297454c1780671cfbc4e981de" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.869545 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tgdf" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.934218 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.936061 4916 scope.go:117] "RemoveContainer" containerID="da5bcd21c64538982f24850109c4f169514d84018828543c398859ed1dc808f6" Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.952830 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2tgdf"] Dec 03 20:15:49 crc kubenswrapper[4916]: I1203 20:15:49.957271 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2tgdf"] Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.013336 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-catalog-content\") pod \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.013396 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-utilities\") pod \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.013529 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dcdc\" (UniqueName: \"kubernetes.io/projected/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-kube-api-access-5dcdc\") pod \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\" (UID: \"d1eb827e-9c0e-434a-be3d-8ef10b2313c1\") " Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.015540 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-utilities" (OuterVolumeSpecName: "utilities") pod "d1eb827e-9c0e-434a-be3d-8ef10b2313c1" (UID: "d1eb827e-9c0e-434a-be3d-8ef10b2313c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.018411 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.018611 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-kube-api-access-5dcdc" (OuterVolumeSpecName: "kube-api-access-5dcdc") pod "d1eb827e-9c0e-434a-be3d-8ef10b2313c1" (UID: "d1eb827e-9c0e-434a-be3d-8ef10b2313c1"). InnerVolumeSpecName "kube-api-access-5dcdc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.029853 4916 scope.go:117] "RemoveContainer" containerID="40bdf90ef5435b5538fe239ba014490bda83ac6f4f162b40cd25c268b1d2431a" Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.034184 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d1eb827e-9c0e-434a-be3d-8ef10b2313c1" (UID: "d1eb827e-9c0e-434a-be3d-8ef10b2313c1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.120187 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.120214 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dcdc\" (UniqueName: \"kubernetes.io/projected/d1eb827e-9c0e-434a-be3d-8ef10b2313c1-kube-api-access-5dcdc\") on node \"crc\" DevicePath \"\"" Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.503275 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d29840ec-58e3-4b69-90ce-ee18c452e917" path="/var/lib/kubelet/pods/d29840ec-58e3-4b69-90ce-ee18c452e917/volumes" Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.900058 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qzqbv" Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.940192 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qzqbv"] Dec 03 20:15:50 crc kubenswrapper[4916]: I1203 20:15:50.952555 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qzqbv"] Dec 03 20:15:52 crc kubenswrapper[4916]: I1203 20:15:52.490423 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" path="/var/lib/kubelet/pods/d1eb827e-9c0e-434a-be3d-8ef10b2313c1/volumes" Dec 03 20:15:53 crc kubenswrapper[4916]: I1203 20:15:53.805819 4916 scope.go:117] "RemoveContainer" containerID="e7e03eda1780dc42fa5b5def994d10f4439d824dedf641dedd995fc87813df52" Dec 03 20:16:46 crc kubenswrapper[4916]: I1203 20:16:46.158441 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:16:46 crc kubenswrapper[4916]: I1203 20:16:46.159168 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:17:16 crc kubenswrapper[4916]: I1203 20:17:16.158363 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:17:16 crc kubenswrapper[4916]: I1203 20:17:16.158965 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:17:42 crc kubenswrapper[4916]: I1203 20:17:42.189389 4916 generic.go:334] "Generic (PLEG): container finished" podID="531e7486-e849-4176-b8d7-b93e11082c0a" 
containerID="edf0f052ed92c122a257b89cd62f5ca0ac356ae12ddc488e0cbfcee5abd53443" exitCode=0 Dec 03 20:17:42 crc kubenswrapper[4916]: I1203 20:17:42.189526 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p" event={"ID":"531e7486-e849-4176-b8d7-b93e11082c0a","Type":"ContainerDied","Data":"edf0f052ed92c122a257b89cd62f5ca0ac356ae12ddc488e0cbfcee5abd53443"} Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.594124 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.697233 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-inventory\") pod \"531e7486-e849-4176-b8d7-b93e11082c0a\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.697391 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-1\") pod \"531e7486-e849-4176-b8d7-b93e11082c0a\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.697419 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-telemetry-combined-ca-bundle\") pod \"531e7486-e849-4176-b8d7-b93e11082c0a\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.697444 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-0\") pod \"531e7486-e849-4176-b8d7-b93e11082c0a\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.697491 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ssh-key\") pod \"531e7486-e849-4176-b8d7-b93e11082c0a\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.697528 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5cbb\" (UniqueName: \"kubernetes.io/projected/531e7486-e849-4176-b8d7-b93e11082c0a-kube-api-access-d5cbb\") pod \"531e7486-e849-4176-b8d7-b93e11082c0a\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.697805 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-2\") pod \"531e7486-e849-4176-b8d7-b93e11082c0a\" (UID: \"531e7486-e849-4176-b8d7-b93e11082c0a\") " Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.703816 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "531e7486-e849-4176-b8d7-b93e11082c0a" 
(UID: "531e7486-e849-4176-b8d7-b93e11082c0a"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.705600 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/531e7486-e849-4176-b8d7-b93e11082c0a-kube-api-access-d5cbb" (OuterVolumeSpecName: "kube-api-access-d5cbb") pod "531e7486-e849-4176-b8d7-b93e11082c0a" (UID: "531e7486-e849-4176-b8d7-b93e11082c0a"). InnerVolumeSpecName "kube-api-access-d5cbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.740449 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "531e7486-e849-4176-b8d7-b93e11082c0a" (UID: "531e7486-e849-4176-b8d7-b93e11082c0a"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.742241 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "531e7486-e849-4176-b8d7-b93e11082c0a" (UID: "531e7486-e849-4176-b8d7-b93e11082c0a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.745188 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "531e7486-e849-4176-b8d7-b93e11082c0a" (UID: "531e7486-e849-4176-b8d7-b93e11082c0a"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.754827 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-inventory" (OuterVolumeSpecName: "inventory") pod "531e7486-e849-4176-b8d7-b93e11082c0a" (UID: "531e7486-e849-4176-b8d7-b93e11082c0a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.757725 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "531e7486-e849-4176-b8d7-b93e11082c0a" (UID: "531e7486-e849-4176-b8d7-b93e11082c0a"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.799805 4916 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.799844 4916 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.799857 4916 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.799873 4916 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.799887 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5cbb\" (UniqueName: \"kubernetes.io/projected/531e7486-e849-4176-b8d7-b93e11082c0a-kube-api-access-d5cbb\") on node \"crc\" DevicePath \"\"" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.799899 4916 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 03 20:17:43 crc kubenswrapper[4916]: I1203 20:17:43.799912 4916 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/531e7486-e849-4176-b8d7-b93e11082c0a-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 20:17:44 crc kubenswrapper[4916]: I1203 20:17:44.212382 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p" event={"ID":"531e7486-e849-4176-b8d7-b93e11082c0a","Type":"ContainerDied","Data":"f38040b8470da836da8615f53061d4391c39f3ead7551ce19e8e4401e504e661"} Dec 03 20:17:44 crc kubenswrapper[4916]: I1203 20:17:44.212443 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f38040b8470da836da8615f53061d4391c39f3ead7551ce19e8e4401e504e661" Dec 03 20:17:44 crc kubenswrapper[4916]: I1203 20:17:44.212473 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p" Dec 03 20:17:46 crc kubenswrapper[4916]: I1203 20:17:46.158480 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:17:46 crc kubenswrapper[4916]: I1203 20:17:46.158846 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:17:46 crc kubenswrapper[4916]: I1203 20:17:46.158890 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 20:17:46 crc kubenswrapper[4916]: I1203 20:17:46.159488 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 20:17:46 crc kubenswrapper[4916]: I1203 20:17:46.159547 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" gracePeriod=600 Dec 03 20:17:48 crc kubenswrapper[4916]: E1203 20:17:48.240025 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:17:48 crc kubenswrapper[4916]: I1203 20:17:48.260984 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" exitCode=0 Dec 03 20:17:48 crc kubenswrapper[4916]: I1203 20:17:48.261062 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"} Dec 03 20:17:48 crc kubenswrapper[4916]: I1203 20:17:48.261126 4916 scope.go:117] "RemoveContainer" containerID="15d5846154948d1852e6ddde861ea5852f573e82e62ac7605b118d9662ac07ae" Dec 03 20:17:48 crc kubenswrapper[4916]: I1203 20:17:48.261989 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:17:48 crc kubenswrapper[4916]: E1203 20:17:48.262473 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:18:00 crc kubenswrapper[4916]: I1203 20:18:00.487646 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:18:00 crc kubenswrapper[4916]: E1203 20:18:00.488528 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:18:13 crc kubenswrapper[4916]: I1203 20:18:13.483293 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:18:13 crc kubenswrapper[4916]: E1203 20:18:13.484233 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:18:25 crc kubenswrapper[4916]: I1203 20:18:25.478540 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:18:25 crc kubenswrapper[4916]: E1203 20:18:25.479692 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:18:36 crc kubenswrapper[4916]: I1203 20:18:36.478791 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:18:36 crc kubenswrapper[4916]: E1203 20:18:36.479740 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:18:50 crc kubenswrapper[4916]: I1203 20:18:50.480044 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:18:50 crc kubenswrapper[4916]: E1203 20:18:50.481039 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:19:02 crc kubenswrapper[4916]: I1203 20:19:02.478820 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:19:02 crc kubenswrapper[4916]: E1203 20:19:02.480162 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:19:13 crc kubenswrapper[4916]: I1203 20:19:13.478879 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:19:13 crc kubenswrapper[4916]: E1203 20:19:13.479760 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:19:26 crc kubenswrapper[4916]: I1203 20:19:26.479021 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:19:26 crc kubenswrapper[4916]: E1203 20:19:26.479856 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:19:40 crc kubenswrapper[4916]: I1203 20:19:40.479335 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:19:40 crc kubenswrapper[4916]: E1203 20:19:40.480520 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:19:55 crc kubenswrapper[4916]: I1203 20:19:55.478212 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:19:55 crc kubenswrapper[4916]: E1203 20:19:55.479048 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:20:10 crc kubenswrapper[4916]: I1203 20:20:10.477796 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:20:10 crc kubenswrapper[4916]: E1203 20:20:10.478553 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:20:25 crc kubenswrapper[4916]: I1203 20:20:25.478104 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:20:25 crc kubenswrapper[4916]: E1203 20:20:25.479366 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:20:38 crc kubenswrapper[4916]: I1203 20:20:38.478352 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771"
Dec 03 20:20:38 crc kubenswrapper[4916]: E1203 20:20:38.479612 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:20:41 crc kubenswrapper[4916]: I1203 20:20:41.109821 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_6f4635b6-2410-4d5f-a7c9-3cf0a04739f7/manager/0.log"
Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.028351 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.029138 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="6b5a6645-8305-4075-b2c2-a243645d7bf3" containerName="openstackclient" containerID="cri-o://e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f" gracePeriod=2
Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.048006 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064113 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Dec 03 20:20:43 crc kubenswrapper[4916]: E1203 20:20:43.064600 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerName="extract-content"
Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064620 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerName="extract-content"
Dec 03 20:20:43 crc kubenswrapper[4916]: E1203 20:20:43.064633 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b5a6645-8305-4075-b2c2-a243645d7bf3" containerName="openstackclient"
podUID="6b5a6645-8305-4075-b2c2-a243645d7bf3" containerName="openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064641 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b5a6645-8305-4075-b2c2-a243645d7bf3" containerName="openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: E1203 20:20:43.064661 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" containerName="extract-utilities" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064669 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" containerName="extract-utilities" Dec 03 20:20:43 crc kubenswrapper[4916]: E1203 20:20:43.064682 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="531e7486-e849-4176-b8d7-b93e11082c0a" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064692 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="531e7486-e849-4176-b8d7-b93e11082c0a" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 20:20:43 crc kubenswrapper[4916]: E1203 20:20:43.064705 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d29840ec-58e3-4b69-90ce-ee18c452e917" containerName="registry-server" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064712 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d29840ec-58e3-4b69-90ce-ee18c452e917" containerName="registry-server" Dec 03 20:20:43 crc kubenswrapper[4916]: E1203 20:20:43.064723 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" containerName="extract-content" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064732 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" containerName="extract-content" Dec 03 20:20:43 crc kubenswrapper[4916]: E1203 20:20:43.064755 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d29840ec-58e3-4b69-90ce-ee18c452e917" containerName="extract-content" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064764 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d29840ec-58e3-4b69-90ce-ee18c452e917" containerName="extract-content" Dec 03 20:20:43 crc kubenswrapper[4916]: E1203 20:20:43.064778 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerName="registry-server" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064806 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerName="registry-server" Dec 03 20:20:43 crc kubenswrapper[4916]: E1203 20:20:43.064824 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" containerName="registry-server" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064831 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" containerName="registry-server" Dec 03 20:20:43 crc kubenswrapper[4916]: E1203 20:20:43.064842 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d29840ec-58e3-4b69-90ce-ee18c452e917" containerName="extract-utilities" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064850 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d29840ec-58e3-4b69-90ce-ee18c452e917" containerName="extract-utilities" Dec 03 20:20:43 crc kubenswrapper[4916]: 
E1203 20:20:43.064870 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerName="extract-utilities" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.064878 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerName="extract-utilities" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.065091 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="d29840ec-58e3-4b69-90ce-ee18c452e917" containerName="registry-server" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.065111 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7c1439a-76bc-4a28-84f8-3e0ac892a250" containerName="registry-server" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.065124 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1eb827e-9c0e-434a-be3d-8ef10b2313c1" containerName="registry-server" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.065143 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b5a6645-8305-4075-b2c2-a243645d7bf3" containerName="openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.065166 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="531e7486-e849-4176-b8d7-b93e11082c0a" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.065900 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.075594 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.090090 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="6b5a6645-8305-4075-b2c2-a243645d7bf3" podUID="57c2516a-b9ff-4816-947e-070103fba378" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.093491 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-combined-ca-bundle\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.093591 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvjw5\" (UniqueName: \"kubernetes.io/projected/57c2516a-b9ff-4816-947e-070103fba378-kube-api-access-lvjw5\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.093809 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-openstack-config-secret\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.093953 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/57c2516a-b9ff-4816-947e-070103fba378-openstack-config\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " 
pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.196065 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-openstack-config-secret\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.196175 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/57c2516a-b9ff-4816-947e-070103fba378-openstack-config\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.196205 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-combined-ca-bundle\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.196333 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvjw5\" (UniqueName: \"kubernetes.io/projected/57c2516a-b9ff-4816-947e-070103fba378-kube-api-access-lvjw5\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.197705 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/57c2516a-b9ff-4816-947e-070103fba378-openstack-config\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.202219 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-openstack-config-secret\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.202861 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-combined-ca-bundle\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.224046 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvjw5\" (UniqueName: \"kubernetes.io/projected/57c2516a-b9ff-4816-947e-070103fba378-kube-api-access-lvjw5\") pod \"openstackclient\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.394994 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 20:20:43 crc kubenswrapper[4916]: I1203 20:20:43.943019 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.126239 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-4sqlz"] Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.127527 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-4sqlz" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.147961 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-4sqlz"] Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.225286 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-5a36-account-create-update-mqtnh"] Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.226434 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-5a36-account-create-update-mqtnh" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.235312 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.236168 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skwnk\" (UniqueName: \"kubernetes.io/projected/5a91d358-d4b5-4314-ac74-e681d52598ca-kube-api-access-skwnk\") pod \"aodh-db-create-4sqlz\" (UID: \"5a91d358-d4b5-4314-ac74-e681d52598ca\") " pod="openstack/aodh-db-create-4sqlz" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.236300 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a91d358-d4b5-4314-ac74-e681d52598ca-operator-scripts\") pod \"aodh-db-create-4sqlz\" (UID: \"5a91d358-d4b5-4314-ac74-e681d52598ca\") " pod="openstack/aodh-db-create-4sqlz" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.249682 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-5a36-account-create-update-mqtnh"] Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.337769 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a91d358-d4b5-4314-ac74-e681d52598ca-operator-scripts\") pod \"aodh-db-create-4sqlz\" (UID: \"5a91d358-d4b5-4314-ac74-e681d52598ca\") " pod="openstack/aodh-db-create-4sqlz" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.338044 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb2a64b8-3927-4574-82be-abc3d0b7d92c-operator-scripts\") pod \"aodh-5a36-account-create-update-mqtnh\" (UID: \"eb2a64b8-3927-4574-82be-abc3d0b7d92c\") " pod="openstack/aodh-5a36-account-create-update-mqtnh" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.338203 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t697\" (UniqueName: \"kubernetes.io/projected/eb2a64b8-3927-4574-82be-abc3d0b7d92c-kube-api-access-6t697\") pod \"aodh-5a36-account-create-update-mqtnh\" (UID: \"eb2a64b8-3927-4574-82be-abc3d0b7d92c\") " pod="openstack/aodh-5a36-account-create-update-mqtnh" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.338323 4916 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-skwnk\" (UniqueName: \"kubernetes.io/projected/5a91d358-d4b5-4314-ac74-e681d52598ca-kube-api-access-skwnk\") pod \"aodh-db-create-4sqlz\" (UID: \"5a91d358-d4b5-4314-ac74-e681d52598ca\") " pod="openstack/aodh-db-create-4sqlz" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.338418 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a91d358-d4b5-4314-ac74-e681d52598ca-operator-scripts\") pod \"aodh-db-create-4sqlz\" (UID: \"5a91d358-d4b5-4314-ac74-e681d52598ca\") " pod="openstack/aodh-db-create-4sqlz" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.358845 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skwnk\" (UniqueName: \"kubernetes.io/projected/5a91d358-d4b5-4314-ac74-e681d52598ca-kube-api-access-skwnk\") pod \"aodh-db-create-4sqlz\" (UID: \"5a91d358-d4b5-4314-ac74-e681d52598ca\") " pod="openstack/aodh-db-create-4sqlz" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.376554 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"57c2516a-b9ff-4816-947e-070103fba378","Type":"ContainerStarted","Data":"eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205"} Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.376634 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"57c2516a-b9ff-4816-947e-070103fba378","Type":"ContainerStarted","Data":"42602efefddbe3790a6cb5a0a8ffa2289fa63c78099392d7db71b9f9f281d419"} Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.400169 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.400145193 podStartE2EDuration="1.400145193s" podCreationTimestamp="2025-12-03 20:20:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:20:44.392217043 +0000 UTC m=+3060.355027309" watchObservedRunningTime="2025-12-03 20:20:44.400145193 +0000 UTC m=+3060.362955459" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.440215 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t697\" (UniqueName: \"kubernetes.io/projected/eb2a64b8-3927-4574-82be-abc3d0b7d92c-kube-api-access-6t697\") pod \"aodh-5a36-account-create-update-mqtnh\" (UID: \"eb2a64b8-3927-4574-82be-abc3d0b7d92c\") " pod="openstack/aodh-5a36-account-create-update-mqtnh" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.441216 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb2a64b8-3927-4574-82be-abc3d0b7d92c-operator-scripts\") pod \"aodh-5a36-account-create-update-mqtnh\" (UID: \"eb2a64b8-3927-4574-82be-abc3d0b7d92c\") " pod="openstack/aodh-5a36-account-create-update-mqtnh" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.441915 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb2a64b8-3927-4574-82be-abc3d0b7d92c-operator-scripts\") pod \"aodh-5a36-account-create-update-mqtnh\" (UID: \"eb2a64b8-3927-4574-82be-abc3d0b7d92c\") " pod="openstack/aodh-5a36-account-create-update-mqtnh" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.458092 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-4sqlz" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.461770 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t697\" (UniqueName: \"kubernetes.io/projected/eb2a64b8-3927-4574-82be-abc3d0b7d92c-kube-api-access-6t697\") pod \"aodh-5a36-account-create-update-mqtnh\" (UID: \"eb2a64b8-3927-4574-82be-abc3d0b7d92c\") " pod="openstack/aodh-5a36-account-create-update-mqtnh" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.579398 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-5a36-account-create-update-mqtnh" Dec 03 20:20:44 crc kubenswrapper[4916]: I1203 20:20:44.904845 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-4sqlz"] Dec 03 20:20:44 crc kubenswrapper[4916]: W1203 20:20:44.917755 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a91d358_d4b5_4314_ac74_e681d52598ca.slice/crio-958c215d59c525cee2b66475c5080cace66867129b2d09e48217178b7ac77327 WatchSource:0}: Error finding container 958c215d59c525cee2b66475c5080cace66867129b2d09e48217178b7ac77327: Status 404 returned error can't find the container with id 958c215d59c525cee2b66475c5080cace66867129b2d09e48217178b7ac77327 Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.028759 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-5a36-account-create-update-mqtnh"] Dec 03 20:20:45 crc kubenswrapper[4916]: W1203 20:20:45.038734 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb2a64b8_3927_4574_82be_abc3d0b7d92c.slice/crio-0d0e747aef49baf006d3807f53f0ebff9791d4bad4a807fe3c9f4082f42862e9 WatchSource:0}: Error finding container 0d0e747aef49baf006d3807f53f0ebff9791d4bad4a807fe3c9f4082f42862e9: Status 404 returned error can't find the container with id 0d0e747aef49baf006d3807f53f0ebff9791d4bad4a807fe3c9f4082f42862e9 Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.043172 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.320269 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.325325 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="6b5a6645-8305-4075-b2c2-a243645d7bf3" podUID="57c2516a-b9ff-4816-947e-070103fba378" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.361405 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config\") pod \"6b5a6645-8305-4075-b2c2-a243645d7bf3\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.361507 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jw82\" (UniqueName: \"kubernetes.io/projected/6b5a6645-8305-4075-b2c2-a243645d7bf3-kube-api-access-4jw82\") pod \"6b5a6645-8305-4075-b2c2-a243645d7bf3\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.361599 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-combined-ca-bundle\") pod \"6b5a6645-8305-4075-b2c2-a243645d7bf3\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.361724 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config-secret\") pod \"6b5a6645-8305-4075-b2c2-a243645d7bf3\" (UID: \"6b5a6645-8305-4075-b2c2-a243645d7bf3\") " Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.366331 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b5a6645-8305-4075-b2c2-a243645d7bf3-kube-api-access-4jw82" (OuterVolumeSpecName: "kube-api-access-4jw82") pod "6b5a6645-8305-4075-b2c2-a243645d7bf3" (UID: "6b5a6645-8305-4075-b2c2-a243645d7bf3"). InnerVolumeSpecName "kube-api-access-4jw82". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.392283 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "6b5a6645-8305-4075-b2c2-a243645d7bf3" (UID: "6b5a6645-8305-4075-b2c2-a243645d7bf3"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.393011 4916 generic.go:334] "Generic (PLEG): container finished" podID="5a91d358-d4b5-4314-ac74-e681d52598ca" containerID="24d847fa2195a6bdb3626d0b95631089d41401ef649704591ad523c23b613b44" exitCode=0 Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.393086 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-4sqlz" event={"ID":"5a91d358-d4b5-4314-ac74-e681d52598ca","Type":"ContainerDied","Data":"24d847fa2195a6bdb3626d0b95631089d41401ef649704591ad523c23b613b44"} Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.393118 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-4sqlz" event={"ID":"5a91d358-d4b5-4314-ac74-e681d52598ca","Type":"ContainerStarted","Data":"958c215d59c525cee2b66475c5080cace66867129b2d09e48217178b7ac77327"} Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.397834 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-5a36-account-create-update-mqtnh" event={"ID":"eb2a64b8-3927-4574-82be-abc3d0b7d92c","Type":"ContainerStarted","Data":"efe20865ebbda02b6521268314f8b5287ec38531b5af74b5b52eab3283db42f7"} Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.397857 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-5a36-account-create-update-mqtnh" event={"ID":"eb2a64b8-3927-4574-82be-abc3d0b7d92c","Type":"ContainerStarted","Data":"0d0e747aef49baf006d3807f53f0ebff9791d4bad4a807fe3c9f4082f42862e9"} Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.399755 4916 generic.go:334] "Generic (PLEG): container finished" podID="6b5a6645-8305-4075-b2c2-a243645d7bf3" containerID="e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f" exitCode=137 Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.400326 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.400864 4916 scope.go:117] "RemoveContainer" containerID="e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.411612 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="6b5a6645-8305-4075-b2c2-a243645d7bf3" podUID="57c2516a-b9ff-4816-947e-070103fba378" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.419245 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b5a6645-8305-4075-b2c2-a243645d7bf3" (UID: "6b5a6645-8305-4075-b2c2-a243645d7bf3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.430709 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-5a36-account-create-update-mqtnh" podStartSLOduration=1.430691913 podStartE2EDuration="1.430691913s" podCreationTimestamp="2025-12-03 20:20:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:20:45.424234452 +0000 UTC m=+3061.387044728" watchObservedRunningTime="2025-12-03 20:20:45.430691913 +0000 UTC m=+3061.393502179" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.438872 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "6b5a6645-8305-4075-b2c2-a243645d7bf3" (UID: "6b5a6645-8305-4075-b2c2-a243645d7bf3"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.464408 4916 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.464440 4916 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6b5a6645-8305-4075-b2c2-a243645d7bf3-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.464449 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jw82\" (UniqueName: \"kubernetes.io/projected/6b5a6645-8305-4075-b2c2-a243645d7bf3-kube-api-access-4jw82\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.464457 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b5a6645-8305-4075-b2c2-a243645d7bf3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.578745 4916 scope.go:117] "RemoveContainer" containerID="e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f" Dec 03 20:20:45 crc kubenswrapper[4916]: E1203 20:20:45.579416 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f\": container with ID starting with e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f not found: ID does not exist" containerID="e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.579466 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f"} err="failed to get container status \"e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f\": rpc error: code = NotFound desc = could not find container \"e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f\": container with ID starting with e64967378cc6b51e11b3aef3fb937362429d1198a64cf7f86b19c0dab070cb6f not found: ID does not exist" Dec 03 20:20:45 crc kubenswrapper[4916]: I1203 20:20:45.718618 4916 status_manager.go:861] "Pod was deleted and 
then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="6b5a6645-8305-4075-b2c2-a243645d7bf3" podUID="57c2516a-b9ff-4816-947e-070103fba378" Dec 03 20:20:46 crc kubenswrapper[4916]: I1203 20:20:46.414828 4916 generic.go:334] "Generic (PLEG): container finished" podID="eb2a64b8-3927-4574-82be-abc3d0b7d92c" containerID="efe20865ebbda02b6521268314f8b5287ec38531b5af74b5b52eab3283db42f7" exitCode=0 Dec 03 20:20:46 crc kubenswrapper[4916]: I1203 20:20:46.414890 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-5a36-account-create-update-mqtnh" event={"ID":"eb2a64b8-3927-4574-82be-abc3d0b7d92c","Type":"ContainerDied","Data":"efe20865ebbda02b6521268314f8b5287ec38531b5af74b5b52eab3283db42f7"} Dec 03 20:20:46 crc kubenswrapper[4916]: I1203 20:20:46.499119 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b5a6645-8305-4075-b2c2-a243645d7bf3" path="/var/lib/kubelet/pods/6b5a6645-8305-4075-b2c2-a243645d7bf3/volumes" Dec 03 20:20:46 crc kubenswrapper[4916]: I1203 20:20:46.828040 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-4sqlz" Dec 03 20:20:46 crc kubenswrapper[4916]: I1203 20:20:46.899759 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a91d358-d4b5-4314-ac74-e681d52598ca-operator-scripts\") pod \"5a91d358-d4b5-4314-ac74-e681d52598ca\" (UID: \"5a91d358-d4b5-4314-ac74-e681d52598ca\") " Dec 03 20:20:46 crc kubenswrapper[4916]: I1203 20:20:46.900127 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skwnk\" (UniqueName: \"kubernetes.io/projected/5a91d358-d4b5-4314-ac74-e681d52598ca-kube-api-access-skwnk\") pod \"5a91d358-d4b5-4314-ac74-e681d52598ca\" (UID: \"5a91d358-d4b5-4314-ac74-e681d52598ca\") " Dec 03 20:20:46 crc kubenswrapper[4916]: I1203 20:20:46.900473 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5a91d358-d4b5-4314-ac74-e681d52598ca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5a91d358-d4b5-4314-ac74-e681d52598ca" (UID: "5a91d358-d4b5-4314-ac74-e681d52598ca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 20:20:46 crc kubenswrapper[4916]: I1203 20:20:46.901018 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a91d358-d4b5-4314-ac74-e681d52598ca-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:46 crc kubenswrapper[4916]: I1203 20:20:46.907603 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a91d358-d4b5-4314-ac74-e681d52598ca-kube-api-access-skwnk" (OuterVolumeSpecName: "kube-api-access-skwnk") pod "5a91d358-d4b5-4314-ac74-e681d52598ca" (UID: "5a91d358-d4b5-4314-ac74-e681d52598ca"). InnerVolumeSpecName "kube-api-access-skwnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:20:47 crc kubenswrapper[4916]: I1203 20:20:47.007232 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skwnk\" (UniqueName: \"kubernetes.io/projected/5a91d358-d4b5-4314-ac74-e681d52598ca-kube-api-access-skwnk\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:47 crc kubenswrapper[4916]: I1203 20:20:47.434536 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-4sqlz" Dec 03 20:20:47 crc kubenswrapper[4916]: I1203 20:20:47.434512 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-4sqlz" event={"ID":"5a91d358-d4b5-4314-ac74-e681d52598ca","Type":"ContainerDied","Data":"958c215d59c525cee2b66475c5080cace66867129b2d09e48217178b7ac77327"} Dec 03 20:20:47 crc kubenswrapper[4916]: I1203 20:20:47.436273 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="958c215d59c525cee2b66475c5080cace66867129b2d09e48217178b7ac77327" Dec 03 20:20:47 crc kubenswrapper[4916]: I1203 20:20:47.854795 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-5a36-account-create-update-mqtnh" Dec 03 20:20:47 crc kubenswrapper[4916]: I1203 20:20:47.925179 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6t697\" (UniqueName: \"kubernetes.io/projected/eb2a64b8-3927-4574-82be-abc3d0b7d92c-kube-api-access-6t697\") pod \"eb2a64b8-3927-4574-82be-abc3d0b7d92c\" (UID: \"eb2a64b8-3927-4574-82be-abc3d0b7d92c\") " Dec 03 20:20:47 crc kubenswrapper[4916]: I1203 20:20:47.926753 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb2a64b8-3927-4574-82be-abc3d0b7d92c-operator-scripts\") pod \"eb2a64b8-3927-4574-82be-abc3d0b7d92c\" (UID: \"eb2a64b8-3927-4574-82be-abc3d0b7d92c\") " Dec 03 20:20:47 crc kubenswrapper[4916]: I1203 20:20:47.927413 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb2a64b8-3927-4574-82be-abc3d0b7d92c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eb2a64b8-3927-4574-82be-abc3d0b7d92c" (UID: "eb2a64b8-3927-4574-82be-abc3d0b7d92c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 20:20:47 crc kubenswrapper[4916]: I1203 20:20:47.947881 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb2a64b8-3927-4574-82be-abc3d0b7d92c-kube-api-access-6t697" (OuterVolumeSpecName: "kube-api-access-6t697") pod "eb2a64b8-3927-4574-82be-abc3d0b7d92c" (UID: "eb2a64b8-3927-4574-82be-abc3d0b7d92c"). InnerVolumeSpecName "kube-api-access-6t697". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:20:48 crc kubenswrapper[4916]: I1203 20:20:48.029013 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6t697\" (UniqueName: \"kubernetes.io/projected/eb2a64b8-3927-4574-82be-abc3d0b7d92c-kube-api-access-6t697\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:48 crc kubenswrapper[4916]: I1203 20:20:48.029354 4916 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb2a64b8-3927-4574-82be-abc3d0b7d92c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:48 crc kubenswrapper[4916]: I1203 20:20:48.459339 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-5a36-account-create-update-mqtnh" event={"ID":"eb2a64b8-3927-4574-82be-abc3d0b7d92c","Type":"ContainerDied","Data":"0d0e747aef49baf006d3807f53f0ebff9791d4bad4a807fe3c9f4082f42862e9"} Dec 03 20:20:48 crc kubenswrapper[4916]: I1203 20:20:48.459397 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d0e747aef49baf006d3807f53f0ebff9791d4bad4a807fe3c9f4082f42862e9" Dec 03 20:20:48 crc kubenswrapper[4916]: I1203 20:20:48.459407 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-5a36-account-create-update-mqtnh" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.497013 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-69lwf"] Dec 03 20:20:49 crc kubenswrapper[4916]: E1203 20:20:49.497845 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb2a64b8-3927-4574-82be-abc3d0b7d92c" containerName="mariadb-account-create-update" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.497865 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb2a64b8-3927-4574-82be-abc3d0b7d92c" containerName="mariadb-account-create-update" Dec 03 20:20:49 crc kubenswrapper[4916]: E1203 20:20:49.497890 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a91d358-d4b5-4314-ac74-e681d52598ca" containerName="mariadb-database-create" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.497898 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a91d358-d4b5-4314-ac74-e681d52598ca" containerName="mariadb-database-create" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.498137 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a91d358-d4b5-4314-ac74-e681d52598ca" containerName="mariadb-database-create" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.498177 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb2a64b8-3927-4574-82be-abc3d0b7d92c" containerName="mariadb-account-create-update" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.499306 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.502754 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-cqszc" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.502843 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.503082 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.503306 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.524750 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-69lwf"] Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.573807 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-combined-ca-bundle\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.573894 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcpzc\" (UniqueName: \"kubernetes.io/projected/070ffe05-42a7-471d-a027-886ec97d915c-kube-api-access-bcpzc\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.573935 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-config-data\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.573972 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-scripts\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.675704 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-combined-ca-bundle\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.677094 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcpzc\" (UniqueName: \"kubernetes.io/projected/070ffe05-42a7-471d-a027-886ec97d915c-kube-api-access-bcpzc\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.677184 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-config-data\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc 
kubenswrapper[4916]: I1203 20:20:49.677282 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-scripts\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.690206 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-scripts\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.690798 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-config-data\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.691278 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-combined-ca-bundle\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.706362 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcpzc\" (UniqueName: \"kubernetes.io/projected/070ffe05-42a7-471d-a027-886ec97d915c-kube-api-access-bcpzc\") pod \"aodh-db-sync-69lwf\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:49 crc kubenswrapper[4916]: I1203 20:20:49.823616 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:50 crc kubenswrapper[4916]: I1203 20:20:50.378896 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-69lwf"] Dec 03 20:20:50 crc kubenswrapper[4916]: I1203 20:20:50.388493 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 20:20:50 crc kubenswrapper[4916]: I1203 20:20:50.481557 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:20:50 crc kubenswrapper[4916]: E1203 20:20:50.482284 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:20:50 crc kubenswrapper[4916]: I1203 20:20:50.492366 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-69lwf" event={"ID":"070ffe05-42a7-471d-a027-886ec97d915c","Type":"ContainerStarted","Data":"ef9bbe43f963834307cba9aa5267b49981d73b58244b9f670e426484a3d872c2"} Dec 03 20:20:54 crc kubenswrapper[4916]: I1203 20:20:54.536261 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-69lwf" event={"ID":"070ffe05-42a7-471d-a027-886ec97d915c","Type":"ContainerStarted","Data":"307afd2b67d24f67f95001927848f8449eb93f89eca3f711c977872b16529509"} Dec 03 20:20:54 crc kubenswrapper[4916]: I1203 20:20:54.570134 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-69lwf" podStartSLOduration=1.955922054 podStartE2EDuration="5.570103025s" podCreationTimestamp="2025-12-03 20:20:49 +0000 UTC" firstStartedPulling="2025-12-03 20:20:50.388214418 +0000 UTC m=+3066.351024694" lastFinishedPulling="2025-12-03 20:20:54.002395399 +0000 UTC m=+3069.965205665" observedRunningTime="2025-12-03 20:20:54.561473667 +0000 UTC m=+3070.524283943" watchObservedRunningTime="2025-12-03 20:20:54.570103025 +0000 UTC m=+3070.532913321" Dec 03 20:20:56 crc kubenswrapper[4916]: I1203 20:20:56.568440 4916 generic.go:334] "Generic (PLEG): container finished" podID="070ffe05-42a7-471d-a027-886ec97d915c" containerID="307afd2b67d24f67f95001927848f8449eb93f89eca3f711c977872b16529509" exitCode=0 Dec 03 20:20:56 crc kubenswrapper[4916]: I1203 20:20:56.568596 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-69lwf" event={"ID":"070ffe05-42a7-471d-a027-886ec97d915c","Type":"ContainerDied","Data":"307afd2b67d24f67f95001927848f8449eb93f89eca3f711c977872b16529509"} Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.009728 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.051390 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-combined-ca-bundle\") pod \"070ffe05-42a7-471d-a027-886ec97d915c\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.051480 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcpzc\" (UniqueName: \"kubernetes.io/projected/070ffe05-42a7-471d-a027-886ec97d915c-kube-api-access-bcpzc\") pod \"070ffe05-42a7-471d-a027-886ec97d915c\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.051505 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-scripts\") pod \"070ffe05-42a7-471d-a027-886ec97d915c\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.051635 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-config-data\") pod \"070ffe05-42a7-471d-a027-886ec97d915c\" (UID: \"070ffe05-42a7-471d-a027-886ec97d915c\") " Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.059173 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/070ffe05-42a7-471d-a027-886ec97d915c-kube-api-access-bcpzc" (OuterVolumeSpecName: "kube-api-access-bcpzc") pod "070ffe05-42a7-471d-a027-886ec97d915c" (UID: "070ffe05-42a7-471d-a027-886ec97d915c"). InnerVolumeSpecName "kube-api-access-bcpzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.062704 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-scripts" (OuterVolumeSpecName: "scripts") pod "070ffe05-42a7-471d-a027-886ec97d915c" (UID: "070ffe05-42a7-471d-a027-886ec97d915c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.084266 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "070ffe05-42a7-471d-a027-886ec97d915c" (UID: "070ffe05-42a7-471d-a027-886ec97d915c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.101754 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-config-data" (OuterVolumeSpecName: "config-data") pod "070ffe05-42a7-471d-a027-886ec97d915c" (UID: "070ffe05-42a7-471d-a027-886ec97d915c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.154039 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.154216 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcpzc\" (UniqueName: \"kubernetes.io/projected/070ffe05-42a7-471d-a027-886ec97d915c-kube-api-access-bcpzc\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.154342 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.154482 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/070ffe05-42a7-471d-a027-886ec97d915c-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.598293 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-69lwf" event={"ID":"070ffe05-42a7-471d-a027-886ec97d915c","Type":"ContainerDied","Data":"ef9bbe43f963834307cba9aa5267b49981d73b58244b9f670e426484a3d872c2"} Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.598362 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef9bbe43f963834307cba9aa5267b49981d73b58244b9f670e426484a3d872c2" Dec 03 20:20:58 crc kubenswrapper[4916]: I1203 20:20:58.598411 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-69lwf" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.290475 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Dec 03 20:20:59 crc kubenswrapper[4916]: E1203 20:20:59.291337 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="070ffe05-42a7-471d-a027-886ec97d915c" containerName="aodh-db-sync" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.291357 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="070ffe05-42a7-471d-a027-886ec97d915c" containerName="aodh-db-sync" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.291620 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="070ffe05-42a7-471d-a027-886ec97d915c" containerName="aodh-db-sync" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.293797 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.298779 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-cqszc" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.298905 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.299046 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.304819 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.379922 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-scripts\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.379959 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tmtw\" (UniqueName: \"kubernetes.io/projected/9e696c9e-5a09-4140-82d3-c29817dca431-kube-api-access-5tmtw\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.380012 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-config-data\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.380368 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-combined-ca-bundle\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.482046 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-combined-ca-bundle\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.482117 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-scripts\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.482149 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tmtw\" (UniqueName: \"kubernetes.io/projected/9e696c9e-5a09-4140-82d3-c29817dca431-kube-api-access-5tmtw\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.482193 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-config-data\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: 
I1203 20:20:59.488125 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-config-data\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.488589 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-combined-ca-bundle\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.497913 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-scripts\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.506553 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tmtw\" (UniqueName: \"kubernetes.io/projected/9e696c9e-5a09-4140-82d3-c29817dca431-kube-api-access-5tmtw\") pod \"aodh-0\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " pod="openstack/aodh-0" Dec 03 20:20:59 crc kubenswrapper[4916]: I1203 20:20:59.625769 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 03 20:21:00 crc kubenswrapper[4916]: I1203 20:21:00.078023 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 03 20:21:00 crc kubenswrapper[4916]: W1203 20:21:00.080605 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e696c9e_5a09_4140_82d3_c29817dca431.slice/crio-82ba6afbb73c76b89c22ddcdd0858eb67fe4467d1be651951b09eb5bb0dee760 WatchSource:0}: Error finding container 82ba6afbb73c76b89c22ddcdd0858eb67fe4467d1be651951b09eb5bb0dee760: Status 404 returned error can't find the container with id 82ba6afbb73c76b89c22ddcdd0858eb67fe4467d1be651951b09eb5bb0dee760 Dec 03 20:21:00 crc kubenswrapper[4916]: I1203 20:21:00.615982 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9e696c9e-5a09-4140-82d3-c29817dca431","Type":"ContainerStarted","Data":"82ba6afbb73c76b89c22ddcdd0858eb67fe4467d1be651951b09eb5bb0dee760"} Dec 03 20:21:01 crc kubenswrapper[4916]: I1203 20:21:01.266430 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:01 crc kubenswrapper[4916]: I1203 20:21:01.267033 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="ceilometer-central-agent" containerID="cri-o://01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1" gracePeriod=30 Dec 03 20:21:01 crc kubenswrapper[4916]: I1203 20:21:01.267448 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="proxy-httpd" containerID="cri-o://8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153" gracePeriod=30 Dec 03 20:21:01 crc kubenswrapper[4916]: I1203 20:21:01.267507 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="sg-core" 
containerID="cri-o://0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197" gracePeriod=30 Dec 03 20:21:01 crc kubenswrapper[4916]: I1203 20:21:01.267594 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="ceilometer-notification-agent" containerID="cri-o://a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d" gracePeriod=30 Dec 03 20:21:01 crc kubenswrapper[4916]: I1203 20:21:01.626436 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9e696c9e-5a09-4140-82d3-c29817dca431","Type":"ContainerStarted","Data":"f288b40e4afc980eb10298c267aed176bb5b0f6a8387d686e3d3daa164b9696d"} Dec 03 20:21:01 crc kubenswrapper[4916]: I1203 20:21:01.629293 4916 generic.go:334] "Generic (PLEG): container finished" podID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerID="8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153" exitCode=0 Dec 03 20:21:01 crc kubenswrapper[4916]: I1203 20:21:01.629326 4916 generic.go:334] "Generic (PLEG): container finished" podID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerID="0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197" exitCode=2 Dec 03 20:21:01 crc kubenswrapper[4916]: I1203 20:21:01.629348 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"407a5f58-7c0e-43ae-aa16-008635f450ff","Type":"ContainerDied","Data":"8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153"} Dec 03 20:21:01 crc kubenswrapper[4916]: I1203 20:21:01.629375 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"407a5f58-7c0e-43ae-aa16-008635f450ff","Type":"ContainerDied","Data":"0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197"} Dec 03 20:21:02 crc kubenswrapper[4916]: I1203 20:21:02.422971 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 03 20:21:02 crc kubenswrapper[4916]: I1203 20:21:02.639378 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9e696c9e-5a09-4140-82d3-c29817dca431","Type":"ContainerStarted","Data":"1a4e4a25f88b2360eb4a94c174974450b6d5d5740d7596a38f26a53ebd16f996"} Dec 03 20:21:02 crc kubenswrapper[4916]: I1203 20:21:02.642344 4916 generic.go:334] "Generic (PLEG): container finished" podID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerID="01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1" exitCode=0 Dec 03 20:21:02 crc kubenswrapper[4916]: I1203 20:21:02.642423 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"407a5f58-7c0e-43ae-aa16-008635f450ff","Type":"ContainerDied","Data":"01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1"} Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.364455 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.478759 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:21:03 crc kubenswrapper[4916]: E1203 20:21:03.479178 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.484107 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-log-httpd\") pod \"407a5f58-7c0e-43ae-aa16-008635f450ff\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.484210 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-ceilometer-tls-certs\") pod \"407a5f58-7c0e-43ae-aa16-008635f450ff\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.484231 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-scripts\") pod \"407a5f58-7c0e-43ae-aa16-008635f450ff\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.484358 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-sg-core-conf-yaml\") pod \"407a5f58-7c0e-43ae-aa16-008635f450ff\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.484386 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6972\" (UniqueName: \"kubernetes.io/projected/407a5f58-7c0e-43ae-aa16-008635f450ff-kube-api-access-w6972\") pod \"407a5f58-7c0e-43ae-aa16-008635f450ff\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.484404 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-config-data\") pod \"407a5f58-7c0e-43ae-aa16-008635f450ff\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.484453 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-run-httpd\") pod \"407a5f58-7c0e-43ae-aa16-008635f450ff\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.484538 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-combined-ca-bundle\") pod \"407a5f58-7c0e-43ae-aa16-008635f450ff\" (UID: \"407a5f58-7c0e-43ae-aa16-008635f450ff\") " Dec 03 
20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.484537 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "407a5f58-7c0e-43ae-aa16-008635f450ff" (UID: "407a5f58-7c0e-43ae-aa16-008635f450ff"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.484893 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "407a5f58-7c0e-43ae-aa16-008635f450ff" (UID: "407a5f58-7c0e-43ae-aa16-008635f450ff"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.484962 4916 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.489161 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/407a5f58-7c0e-43ae-aa16-008635f450ff-kube-api-access-w6972" (OuterVolumeSpecName: "kube-api-access-w6972") pod "407a5f58-7c0e-43ae-aa16-008635f450ff" (UID: "407a5f58-7c0e-43ae-aa16-008635f450ff"). InnerVolumeSpecName "kube-api-access-w6972". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.493678 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-scripts" (OuterVolumeSpecName: "scripts") pod "407a5f58-7c0e-43ae-aa16-008635f450ff" (UID: "407a5f58-7c0e-43ae-aa16-008635f450ff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.509904 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "407a5f58-7c0e-43ae-aa16-008635f450ff" (UID: "407a5f58-7c0e-43ae-aa16-008635f450ff"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.560211 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "407a5f58-7c0e-43ae-aa16-008635f450ff" (UID: "407a5f58-7c0e-43ae-aa16-008635f450ff"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.591648 4916 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/407a5f58-7c0e-43ae-aa16-008635f450ff-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.591820 4916 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.591878 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.591949 4916 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.592086 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6972\" (UniqueName: \"kubernetes.io/projected/407a5f58-7c0e-43ae-aa16-008635f450ff-kube-api-access-w6972\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.595778 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "407a5f58-7c0e-43ae-aa16-008635f450ff" (UID: "407a5f58-7c0e-43ae-aa16-008635f450ff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.632418 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-config-data" (OuterVolumeSpecName: "config-data") pod "407a5f58-7c0e-43ae-aa16-008635f450ff" (UID: "407a5f58-7c0e-43ae-aa16-008635f450ff"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.654282 4916 generic.go:334] "Generic (PLEG): container finished" podID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerID="a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d" exitCode=0 Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.654324 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"407a5f58-7c0e-43ae-aa16-008635f450ff","Type":"ContainerDied","Data":"a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d"} Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.654349 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"407a5f58-7c0e-43ae-aa16-008635f450ff","Type":"ContainerDied","Data":"679af06bafa6ffc20c9115a199e152455f7d8f8e8cb678ce10edf3d037a1dbdb"} Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.654365 4916 scope.go:117] "RemoveContainer" containerID="8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.654441 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.700074 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.700129 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/407a5f58-7c0e-43ae-aa16-008635f450ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.700324 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.724676 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.750504 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:03 crc kubenswrapper[4916]: E1203 20:21:03.750979 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="ceilometer-central-agent" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.750996 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="ceilometer-central-agent" Dec 03 20:21:03 crc kubenswrapper[4916]: E1203 20:21:03.751016 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="sg-core" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.751022 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="sg-core" Dec 03 20:21:03 crc kubenswrapper[4916]: E1203 20:21:03.751043 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="ceilometer-notification-agent" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.751050 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="ceilometer-notification-agent" Dec 03 20:21:03 crc kubenswrapper[4916]: E1203 20:21:03.751067 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="proxy-httpd" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.751075 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="proxy-httpd" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.751253 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="ceilometer-central-agent" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.751269 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="ceilometer-notification-agent" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.751282 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="sg-core" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.751293 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" containerName="proxy-httpd" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.753071 4916 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.755518 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.755929 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.756263 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.791782 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.804770 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.804836 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.804860 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-log-httpd\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.804895 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-config-data\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.804916 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9v9d\" (UniqueName: \"kubernetes.io/projected/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-kube-api-access-b9v9d\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.804949 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-scripts\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.804993 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.805022 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-run-httpd\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.906961 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.907012 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-run-httpd\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.907111 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.907132 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.907150 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-log-httpd\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.907175 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-config-data\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.907193 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9v9d\" (UniqueName: \"kubernetes.io/projected/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-kube-api-access-b9v9d\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.907221 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-scripts\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.908428 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-log-httpd\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.908468 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-run-httpd\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.911086 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-config-data\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.912979 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.914088 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.914439 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.917457 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-scripts\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.923442 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9v9d\" (UniqueName: \"kubernetes.io/projected/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-kube-api-access-b9v9d\") pod \"ceilometer-0\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " pod="openstack/ceilometer-0" Dec 03 20:21:03 crc kubenswrapper[4916]: I1203 20:21:03.964921 4916 scope.go:117] "RemoveContainer" containerID="0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.016307 4916 scope.go:117] "RemoveContainer" containerID="a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.083727 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.183285 4916 scope.go:117] "RemoveContainer" containerID="01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.229785 4916 scope.go:117] "RemoveContainer" containerID="8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153" Dec 03 20:21:04 crc kubenswrapper[4916]: E1203 20:21:04.231260 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153\": container with ID starting with 8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153 not found: ID does not exist" containerID="8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.231327 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153"} err="failed to get container status \"8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153\": rpc error: code = NotFound desc = could not find container \"8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153\": container with ID starting with 8b7e5a3a2e691f23ff1455597d73776028f232b346602ac969c0ae9b95788153 not found: ID does not exist" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.231355 4916 scope.go:117] "RemoveContainer" containerID="0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197" Dec 03 20:21:04 crc kubenswrapper[4916]: E1203 20:21:04.231620 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197\": container with ID starting with 0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197 not found: ID does not exist" containerID="0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.231664 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197"} err="failed to get container status \"0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197\": rpc error: code = NotFound desc = could not find container \"0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197\": container with ID starting with 0675f88c0bdf7c447eaf7410979d39a570b29f13d58db0402376b543996d4197 not found: ID does not exist" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.231677 4916 scope.go:117] "RemoveContainer" containerID="a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d" Dec 03 20:21:04 crc kubenswrapper[4916]: E1203 20:21:04.231857 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d\": container with ID starting with a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d not found: ID does not exist" containerID="a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.231872 4916 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d"} err="failed to get container status \"a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d\": rpc error: code = NotFound desc = could not find container \"a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d\": container with ID starting with a2aba8bc62e56accaf9eb5b03b20d8b9398250b8aba3241f111808d0d9cb410d not found: ID does not exist" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.231884 4916 scope.go:117] "RemoveContainer" containerID="01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1" Dec 03 20:21:04 crc kubenswrapper[4916]: E1203 20:21:04.232029 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1\": container with ID starting with 01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1 not found: ID does not exist" containerID="01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.232045 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1"} err="failed to get container status \"01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1\": rpc error: code = NotFound desc = could not find container \"01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1\": container with ID starting with 01eb607ec2173ca0d5d9b4caed4ede01656f8c54e66a21cdfd4752469e44d8a1 not found: ID does not exist" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.497253 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="407a5f58-7c0e-43ae-aa16-008635f450ff" path="/var/lib/kubelet/pods/407a5f58-7c0e-43ae-aa16-008635f450ff/volumes" Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.562637 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:04 crc kubenswrapper[4916]: W1203 20:21:04.562915 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97e1e3fd_d4ee_4fee_8ec6_cb6c21f4708a.slice/crio-f104e2ea8e0f28a1c799cf8591a1533182983783ba32ccf6a0910c09b94850ef WatchSource:0}: Error finding container f104e2ea8e0f28a1c799cf8591a1533182983783ba32ccf6a0910c09b94850ef: Status 404 returned error can't find the container with id f104e2ea8e0f28a1c799cf8591a1533182983783ba32ccf6a0910c09b94850ef Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.663608 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a","Type":"ContainerStarted","Data":"f104e2ea8e0f28a1c799cf8591a1533182983783ba32ccf6a0910c09b94850ef"} Dec 03 20:21:04 crc kubenswrapper[4916]: I1203 20:21:04.665680 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9e696c9e-5a09-4140-82d3-c29817dca431","Type":"ContainerStarted","Data":"4775fd58348b9a447fb25ac4ff5693e09205a4fdb46811ba807af708754586c6"} Dec 03 20:21:05 crc kubenswrapper[4916]: I1203 20:21:05.003284 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:05 crc kubenswrapper[4916]: I1203 20:21:05.679738 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a","Type":"ContainerStarted","Data":"100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593"} Dec 03 20:21:06 crc kubenswrapper[4916]: I1203 20:21:06.691504 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a","Type":"ContainerStarted","Data":"6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927"} Dec 03 20:21:06 crc kubenswrapper[4916]: I1203 20:21:06.694089 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9e696c9e-5a09-4140-82d3-c29817dca431","Type":"ContainerStarted","Data":"0e1ebb26b42d3ed1efef45df342fc5d7183f1961865c8f7dc95e591dc8769bcd"} Dec 03 20:21:06 crc kubenswrapper[4916]: I1203 20:21:06.694202 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-api" containerID="cri-o://f288b40e4afc980eb10298c267aed176bb5b0f6a8387d686e3d3daa164b9696d" gracePeriod=30 Dec 03 20:21:06 crc kubenswrapper[4916]: I1203 20:21:06.694607 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-listener" containerID="cri-o://0e1ebb26b42d3ed1efef45df342fc5d7183f1961865c8f7dc95e591dc8769bcd" gracePeriod=30 Dec 03 20:21:06 crc kubenswrapper[4916]: I1203 20:21:06.694651 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-notifier" containerID="cri-o://4775fd58348b9a447fb25ac4ff5693e09205a4fdb46811ba807af708754586c6" gracePeriod=30 Dec 03 20:21:06 crc kubenswrapper[4916]: I1203 20:21:06.694682 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-evaluator" containerID="cri-o://1a4e4a25f88b2360eb4a94c174974450b6d5d5740d7596a38f26a53ebd16f996" gracePeriod=30 Dec 03 20:21:06 crc kubenswrapper[4916]: I1203 20:21:06.726273 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.200127212 podStartE2EDuration="7.726258587s" podCreationTimestamp="2025-12-03 20:20:59 +0000 UTC" firstStartedPulling="2025-12-03 20:21:00.084865181 +0000 UTC m=+3076.047675447" lastFinishedPulling="2025-12-03 20:21:05.610996556 +0000 UTC m=+3081.573806822" observedRunningTime="2025-12-03 20:21:06.724915861 +0000 UTC m=+3082.687726147" watchObservedRunningTime="2025-12-03 20:21:06.726258587 +0000 UTC m=+3082.689068853" Dec 03 20:21:07 crc kubenswrapper[4916]: I1203 20:21:07.706278 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a","Type":"ContainerStarted","Data":"69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884"} Dec 03 20:21:07 crc kubenswrapper[4916]: I1203 20:21:07.708896 4916 generic.go:334] "Generic (PLEG): container finished" podID="9e696c9e-5a09-4140-82d3-c29817dca431" containerID="4775fd58348b9a447fb25ac4ff5693e09205a4fdb46811ba807af708754586c6" exitCode=0 Dec 03 20:21:07 crc kubenswrapper[4916]: I1203 20:21:07.708947 4916 generic.go:334] "Generic (PLEG): container finished" podID="9e696c9e-5a09-4140-82d3-c29817dca431" containerID="1a4e4a25f88b2360eb4a94c174974450b6d5d5740d7596a38f26a53ebd16f996" exitCode=0 Dec 03 20:21:07 crc kubenswrapper[4916]: 
I1203 20:21:07.708961 4916 generic.go:334] "Generic (PLEG): container finished" podID="9e696c9e-5a09-4140-82d3-c29817dca431" containerID="f288b40e4afc980eb10298c267aed176bb5b0f6a8387d686e3d3daa164b9696d" exitCode=0 Dec 03 20:21:07 crc kubenswrapper[4916]: I1203 20:21:07.708996 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9e696c9e-5a09-4140-82d3-c29817dca431","Type":"ContainerDied","Data":"4775fd58348b9a447fb25ac4ff5693e09205a4fdb46811ba807af708754586c6"} Dec 03 20:21:07 crc kubenswrapper[4916]: I1203 20:21:07.709061 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9e696c9e-5a09-4140-82d3-c29817dca431","Type":"ContainerDied","Data":"1a4e4a25f88b2360eb4a94c174974450b6d5d5740d7596a38f26a53ebd16f996"} Dec 03 20:21:07 crc kubenswrapper[4916]: I1203 20:21:07.709081 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9e696c9e-5a09-4140-82d3-c29817dca431","Type":"ContainerDied","Data":"f288b40e4afc980eb10298c267aed176bb5b0f6a8387d686e3d3daa164b9696d"} Dec 03 20:21:08 crc kubenswrapper[4916]: I1203 20:21:08.722348 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a","Type":"ContainerStarted","Data":"35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487"} Dec 03 20:21:08 crc kubenswrapper[4916]: I1203 20:21:08.722699 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="proxy-httpd" containerID="cri-o://35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487" gracePeriod=30 Dec 03 20:21:08 crc kubenswrapper[4916]: I1203 20:21:08.722734 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 20:21:08 crc kubenswrapper[4916]: I1203 20:21:08.722739 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="ceilometer-notification-agent" containerID="cri-o://6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927" gracePeriod=30 Dec 03 20:21:08 crc kubenswrapper[4916]: I1203 20:21:08.722757 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="sg-core" containerID="cri-o://69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884" gracePeriod=30 Dec 03 20:21:08 crc kubenswrapper[4916]: I1203 20:21:08.722766 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="ceilometer-central-agent" containerID="cri-o://100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593" gracePeriod=30 Dec 03 20:21:08 crc kubenswrapper[4916]: I1203 20:21:08.767725 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.127394534 podStartE2EDuration="5.767707846s" podCreationTimestamp="2025-12-03 20:21:03 +0000 UTC" firstStartedPulling="2025-12-03 20:21:04.565332897 +0000 UTC m=+3080.528143163" lastFinishedPulling="2025-12-03 20:21:08.205646189 +0000 UTC m=+3084.168456475" observedRunningTime="2025-12-03 20:21:08.750631074 +0000 UTC m=+3084.713441340" watchObservedRunningTime="2025-12-03 20:21:08.767707846 +0000 UTC m=+3084.730518112" 
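The entries above show the kubelet's graceful-shutdown path end to end: an API-side pod DELETE ("SyncLoop DELETE") leads to "Killing container with a grace period ... gracePeriod=30" for each container, followed by ContainerDied PLEG events once the processes exit. As a minimal illustrative sketch (not taken from this log) of what triggers that sequence from the client side, the Go snippet below uses client-go to delete the openstack/ceilometer-0 pod with an explicit 30-second grace period; the kubeconfig path is an assumption.

    package main

    import (
    	"context"
    	"fmt"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	// Assumed kubeconfig location; adjust for the host being inspected.
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
    	if err != nil {
    		panic(err)
    	}
    	client, err := kubernetes.NewForConfig(cfg)
    	if err != nil {
    		panic(err)
    	}
    	// 30 mirrors the gracePeriod=30 in the kuberuntime_container.go entries above.
    	grace := int64(30)
    	err = client.CoreV1().Pods("openstack").Delete(context.TODO(), "ceilometer-0",
    		metav1.DeleteOptions{GracePeriodSeconds: &grace})
    	if err != nil {
    		panic(err)
    	}
    	fmt.Println("delete requested; containers receive SIGTERM, then SIGKILL after the grace period")
    }

On the node side this produces exactly the pattern logged here: each container exits (exitCode=0 for clean SIGTERM handling, exitCode=2 for sg-core above), then the volume reconciler unmounts and detaches the pod's volumes, as the reconciler_common.go entries that follow show.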
Dec 03 20:21:09 crc kubenswrapper[4916]: I1203 20:21:09.765770 4916 generic.go:334] "Generic (PLEG): container finished" podID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerID="35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487" exitCode=0 Dec 03 20:21:09 crc kubenswrapper[4916]: I1203 20:21:09.766089 4916 generic.go:334] "Generic (PLEG): container finished" podID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerID="69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884" exitCode=2 Dec 03 20:21:09 crc kubenswrapper[4916]: I1203 20:21:09.766101 4916 generic.go:334] "Generic (PLEG): container finished" podID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerID="6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927" exitCode=0 Dec 03 20:21:09 crc kubenswrapper[4916]: I1203 20:21:09.766125 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a","Type":"ContainerDied","Data":"35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487"} Dec 03 20:21:09 crc kubenswrapper[4916]: I1203 20:21:09.766155 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a","Type":"ContainerDied","Data":"69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884"} Dec 03 20:21:09 crc kubenswrapper[4916]: I1203 20:21:09.766168 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a","Type":"ContainerDied","Data":"6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927"} Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.737118 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.805643 4916 generic.go:334] "Generic (PLEG): container finished" podID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerID="100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593" exitCode=0 Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.805837 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a","Type":"ContainerDied","Data":"100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593"} Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.805966 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.805996 4916 scope.go:117] "RemoveContainer" containerID="35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.805974 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a","Type":"ContainerDied","Data":"f104e2ea8e0f28a1c799cf8591a1533182983783ba32ccf6a0910c09b94850ef"} Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.817632 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-run-httpd\") pod \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.817866 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9v9d\" (UniqueName: \"kubernetes.io/projected/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-kube-api-access-b9v9d\") pod \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.817940 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-config-data\") pod \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.817983 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-combined-ca-bundle\") pod \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.818074 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-ceilometer-tls-certs\") pod \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.818110 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-sg-core-conf-yaml\") pod \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.818236 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-log-httpd\") pod \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.818299 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-scripts\") pod \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\" (UID: \"97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a\") " Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.818460 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" (UID: "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.819137 4916 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.820520 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" (UID: "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.824954 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-kube-api-access-b9v9d" (OuterVolumeSpecName: "kube-api-access-b9v9d") pod "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" (UID: "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a"). InnerVolumeSpecName "kube-api-access-b9v9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.826757 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-scripts" (OuterVolumeSpecName: "scripts") pod "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" (UID: "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.854485 4916 scope.go:117] "RemoveContainer" containerID="69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.868265 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" (UID: "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.900479 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" (UID: "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.922011 4916 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.922058 4916 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.922075 4916 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.922093 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.922107 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9v9d\" (UniqueName: \"kubernetes.io/projected/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-kube-api-access-b9v9d\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.942352 4916 scope.go:117] "RemoveContainer" containerID="6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.967280 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" (UID: "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.967859 4916 scope.go:117] "RemoveContainer" containerID="100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.987482 4916 scope.go:117] "RemoveContainer" containerID="35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487" Dec 03 20:21:12 crc kubenswrapper[4916]: E1203 20:21:12.987909 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487\": container with ID starting with 35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487 not found: ID does not exist" containerID="35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.987945 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487"} err="failed to get container status \"35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487\": rpc error: code = NotFound desc = could not find container \"35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487\": container with ID starting with 35cc8008c57cf50d4616affeb7288106d84a0a4c2e0153ecd0868f467c428487 not found: ID does not exist" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.987971 4916 scope.go:117] "RemoveContainer" containerID="69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884" Dec 03 20:21:12 crc kubenswrapper[4916]: E1203 20:21:12.988421 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884\": container with ID starting with 69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884 not found: ID does not exist" containerID="69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.988454 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884"} err="failed to get container status \"69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884\": rpc error: code = NotFound desc = could not find container \"69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884\": container with ID starting with 69e7905a2390d0c1e4af5792c72c3814accf247f2c28efefe729aa92f7caf884 not found: ID does not exist" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.988470 4916 scope.go:117] "RemoveContainer" containerID="6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.988622 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-config-data" (OuterVolumeSpecName: "config-data") pod "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" (UID: "97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:12 crc kubenswrapper[4916]: E1203 20:21:12.988950 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927\": container with ID starting with 6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927 not found: ID does not exist" containerID="6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.988982 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927"} err="failed to get container status \"6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927\": rpc error: code = NotFound desc = could not find container \"6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927\": container with ID starting with 6a8b39247ebbfe11dd1aca96ae279935c4ca66165cd80921de06638460ea6927 not found: ID does not exist" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.989001 4916 scope.go:117] "RemoveContainer" containerID="100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593" Dec 03 20:21:12 crc kubenswrapper[4916]: E1203 20:21:12.989384 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593\": container with ID starting with 100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593 not found: ID does not exist" containerID="100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593" Dec 03 20:21:12 crc kubenswrapper[4916]: I1203 20:21:12.989414 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593"} err="failed to get container status \"100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593\": rpc error: code = NotFound desc = could not find container \"100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593\": container with ID starting with 100a61f84b55ffc8e4695446b30f1e8e20f31ed7c07a5a21032946a50e13c593 not found: ID does not exist" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.024195 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.024231 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.165398 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.177257 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.191879 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:13 crc kubenswrapper[4916]: E1203 20:21:13.192360 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="sg-core" Dec 03 20:21:13 crc 
kubenswrapper[4916]: I1203 20:21:13.192385 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="sg-core" Dec 03 20:21:13 crc kubenswrapper[4916]: E1203 20:21:13.192423 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="ceilometer-notification-agent" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.192433 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="ceilometer-notification-agent" Dec 03 20:21:13 crc kubenswrapper[4916]: E1203 20:21:13.192453 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="ceilometer-central-agent" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.192461 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="ceilometer-central-agent" Dec 03 20:21:13 crc kubenswrapper[4916]: E1203 20:21:13.192473 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="proxy-httpd" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.192481 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="proxy-httpd" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.192704 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="sg-core" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.192744 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="ceilometer-notification-agent" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.192760 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="ceilometer-central-agent" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.192768 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" containerName="proxy-httpd" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.194405 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
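The cpu_manager.go / state_mem.go / memory_manager.go entries above record the kubelet purging per-container resource assignments left behind by the old ceilometer-0 pod UID before the replacement pod is admitted. A simplified sketch of that stale-state cleanup, assuming a plain mutex-guarded map rather than the kubelet's checkpointed state files:

    package main

    import (
    	"fmt"
    	"sync"
    )

    // key mirrors how the CPU manager indexes assignments: pod UID plus container name.
    type key struct{ podUID, container string }

    // assignments is a toy stand-in for the kubelet's cpu/memory manager state.
    type assignments struct {
    	mu   sync.Mutex
    	sets map[key]string // container -> CPU set, e.g. "0-3"
    }

    // removeStaleState drops every leftover entry for a pod UID, which is what
    // the "RemoveStaleState removing container/state" lines above record.
    func (a *assignments) removeStaleState(podUID string) {
    	a.mu.Lock()
    	defer a.mu.Unlock()
    	for k := range a.sets {
    		if k.podUID == podUID {
    			fmt.Printf("deleted CPUSet assignment for %s/%s\n", k.podUID, k.container)
    			delete(a.sets, k) // deleting during range is safe in Go
    		}
    	}
    }

    func main() {
    	a := &assignments{sets: map[key]string{
    		{"97e1e3fd", "sg-core"}:     "0-1",
    		{"97e1e3fd", "proxy-httpd"}: "2-3",
    	}}
    	a.removeStaleState("97e1e3fd")
    }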
Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.196857 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.196928 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.197068 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.208537 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.329682 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.329727 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrpnm\" (UniqueName: \"kubernetes.io/projected/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-kube-api-access-xrpnm\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.329749 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-run-httpd\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.329804 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-config-data\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.329891 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-log-httpd\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.329920 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-scripts\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.330432 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.330610 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName:
\"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.432795 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.433430 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrpnm\" (UniqueName: \"kubernetes.io/projected/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-kube-api-access-xrpnm\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.433638 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-run-httpd\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.433778 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-config-data\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.433916 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-log-httpd\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.434072 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-scripts\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.434270 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.434430 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.434318 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-run-httpd\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.434597 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-log-httpd\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.437672 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.439587 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.441263 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-config-data\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.449493 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.451066 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-scripts\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.465673 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrpnm\" (UniqueName: \"kubernetes.io/projected/6f01d12e-f1c3-4da2-b3bc-31623e4a2493-kube-api-access-xrpnm\") pod \"ceilometer-0\" (UID: \"6f01d12e-f1c3-4da2-b3bc-31623e4a2493\") " pod="openstack/ceilometer-0" Dec 03 20:21:13 crc kubenswrapper[4916]: I1203 20:21:13.530779 4916 util.go:30] "No sandbox for pod can be found. 
Dec 03 20:21:14 crc kubenswrapper[4916]: I1203 20:21:14.048693 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 20:21:14 crc kubenswrapper[4916]: W1203 20:21:14.054102 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f01d12e_f1c3_4da2_b3bc_31623e4a2493.slice/crio-184972585b94089606ac7d7c56e8f5d58ec228cd8579261877dc5d0d0d5ca9f0 WatchSource:0}: Error finding container 184972585b94089606ac7d7c56e8f5d58ec228cd8579261877dc5d0d0d5ca9f0: Status 404 returned error can't find the container with id 184972585b94089606ac7d7c56e8f5d58ec228cd8579261877dc5d0d0d5ca9f0 Dec 03 20:21:14 crc kubenswrapper[4916]: I1203 20:21:14.507067 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a" path="/var/lib/kubelet/pods/97e1e3fd-d4ee-4fee-8ec6-cb6c21f4708a/volumes" Dec 03 20:21:14 crc kubenswrapper[4916]: I1203 20:21:14.835390 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f01d12e-f1c3-4da2-b3bc-31623e4a2493","Type":"ContainerStarted","Data":"cac6258135a3b6e6471ca15daeb04e3e9eabee10014890d1954f8efda1f988bc"} Dec 03 20:21:14 crc kubenswrapper[4916]: I1203 20:21:14.835440 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f01d12e-f1c3-4da2-b3bc-31623e4a2493","Type":"ContainerStarted","Data":"184972585b94089606ac7d7c56e8f5d58ec228cd8579261877dc5d0d0d5ca9f0"} Dec 03 20:21:15 crc kubenswrapper[4916]: I1203 20:21:15.479235 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:21:15 crc kubenswrapper[4916]: E1203 20:21:15.480210 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:21:17 crc kubenswrapper[4916]: I1203 20:21:17.876104 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f01d12e-f1c3-4da2-b3bc-31623e4a2493","Type":"ContainerStarted","Data":"86d3efdbbb1a46ae83ea67acf47a3bf5ed07847ea268299536372b37276e9a4d"} Dec 03 20:21:18 crc kubenswrapper[4916]: I1203 20:21:18.892402 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f01d12e-f1c3-4da2-b3bc-31623e4a2493","Type":"ContainerStarted","Data":"df7f8a056231724820589b9ccb7ed2d37eee36e63c2028876acb9bf2e3861262"} Dec 03 20:21:19 crc kubenswrapper[4916]: I1203 20:21:19.908750 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6f01d12e-f1c3-4da2-b3bc-31623e4a2493","Type":"ContainerStarted","Data":"e9c0f90b84d7f1e84385d0536c48cc04af46409aaa4e4fd2f6a0f0979ba0c384"} Dec 03 20:21:19 crc kubenswrapper[4916]: I1203 20:21:19.909260 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 20:21:19 crc kubenswrapper[4916]: I1203 20:21:19.958455 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.987825508 podStartE2EDuration="6.958430969s"
podCreationTimestamp="2025-12-03 20:21:13 +0000 UTC" firstStartedPulling="2025-12-03 20:21:14.057577522 +0000 UTC m=+3090.020387798" lastFinishedPulling="2025-12-03 20:21:19.028182953 +0000 UTC m=+3094.990993259" observedRunningTime="2025-12-03 20:21:19.951598508 +0000 UTC m=+3095.914408814" watchObservedRunningTime="2025-12-03 20:21:19.958430969 +0000 UTC m=+3095.921241265" Dec 03 20:21:28 crc kubenswrapper[4916]: I1203 20:21:28.479081 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:21:28 crc kubenswrapper[4916]: E1203 20:21:28.480201 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.149755 4916 generic.go:334] "Generic (PLEG): container finished" podID="9e696c9e-5a09-4140-82d3-c29817dca431" containerID="0e1ebb26b42d3ed1efef45df342fc5d7183f1961865c8f7dc95e591dc8769bcd" exitCode=137 Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.149828 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9e696c9e-5a09-4140-82d3-c29817dca431","Type":"ContainerDied","Data":"0e1ebb26b42d3ed1efef45df342fc5d7183f1961865c8f7dc95e591dc8769bcd"} Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.648896 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.783671 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-combined-ca-bundle\") pod \"9e696c9e-5a09-4140-82d3-c29817dca431\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.783786 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-scripts\") pod \"9e696c9e-5a09-4140-82d3-c29817dca431\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.784009 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-config-data\") pod \"9e696c9e-5a09-4140-82d3-c29817dca431\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.784083 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tmtw\" (UniqueName: \"kubernetes.io/projected/9e696c9e-5a09-4140-82d3-c29817dca431-kube-api-access-5tmtw\") pod \"9e696c9e-5a09-4140-82d3-c29817dca431\" (UID: \"9e696c9e-5a09-4140-82d3-c29817dca431\") " Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.790765 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-scripts" (OuterVolumeSpecName: "scripts") pod "9e696c9e-5a09-4140-82d3-c29817dca431" (UID: "9e696c9e-5a09-4140-82d3-c29817dca431"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.816786 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e696c9e-5a09-4140-82d3-c29817dca431-kube-api-access-5tmtw" (OuterVolumeSpecName: "kube-api-access-5tmtw") pod "9e696c9e-5a09-4140-82d3-c29817dca431" (UID: "9e696c9e-5a09-4140-82d3-c29817dca431"). InnerVolumeSpecName "kube-api-access-5tmtw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.886320 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tmtw\" (UniqueName: \"kubernetes.io/projected/9e696c9e-5a09-4140-82d3-c29817dca431-kube-api-access-5tmtw\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.886348 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.899514 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e696c9e-5a09-4140-82d3-c29817dca431" (UID: "9e696c9e-5a09-4140-82d3-c29817dca431"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.904471 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-config-data" (OuterVolumeSpecName: "config-data") pod "9e696c9e-5a09-4140-82d3-c29817dca431" (UID: "9e696c9e-5a09-4140-82d3-c29817dca431"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.987726 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:37 crc kubenswrapper[4916]: I1203 20:21:37.988100 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e696c9e-5a09-4140-82d3-c29817dca431-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.164304 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"9e696c9e-5a09-4140-82d3-c29817dca431","Type":"ContainerDied","Data":"82ba6afbb73c76b89c22ddcdd0858eb67fe4467d1be651951b09eb5bb0dee760"} Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.164358 4916 scope.go:117] "RemoveContainer" containerID="0e1ebb26b42d3ed1efef45df342fc5d7183f1961865c8f7dc95e591dc8769bcd" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.164439 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.193723 4916 scope.go:117] "RemoveContainer" containerID="4775fd58348b9a447fb25ac4ff5693e09205a4fdb46811ba807af708754586c6" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.224469 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.239234 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-0"] Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.239239 4916 scope.go:117] "RemoveContainer" containerID="1a4e4a25f88b2360eb4a94c174974450b6d5d5740d7596a38f26a53ebd16f996" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.256562 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Dec 03 20:21:38 crc kubenswrapper[4916]: E1203 20:21:38.257003 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-evaluator" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.257024 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-evaluator" Dec 03 20:21:38 crc kubenswrapper[4916]: E1203 20:21:38.257046 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-listener" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.257053 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-listener" Dec 03 20:21:38 crc kubenswrapper[4916]: E1203 20:21:38.257081 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-api" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.257089 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-api" Dec 03 20:21:38 crc kubenswrapper[4916]: E1203 20:21:38.257111 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-notifier" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.257120 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-notifier" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.257390 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-evaluator" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.257415 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-listener" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.257433 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-api" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.257453 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e696c9e-5a09-4140-82d3-c29817dca431" containerName="aodh-notifier" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.260474 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.277393 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.314153 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-cqszc" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.314344 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.314421 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.314646 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-internal-svc" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.314904 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-public-svc" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.333426 4916 scope.go:117] "RemoveContainer" containerID="f288b40e4afc980eb10298c267aed176bb5b0f6a8387d686e3d3daa164b9696d" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.415270 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-internal-tls-certs\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.415393 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-combined-ca-bundle\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.415726 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-config-data\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.415898 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhb4d\" (UniqueName: \"kubernetes.io/projected/6dbf29e4-a2e3-4882-9513-5d39d513451a-kube-api-access-vhb4d\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.416133 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-scripts\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.416177 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-public-tls-certs\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.493792 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="9e696c9e-5a09-4140-82d3-c29817dca431" path="/var/lib/kubelet/pods/9e696c9e-5a09-4140-82d3-c29817dca431/volumes" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.518211 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-internal-tls-certs\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.518316 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-combined-ca-bundle\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.518407 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-config-data\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.518482 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhb4d\" (UniqueName: \"kubernetes.io/projected/6dbf29e4-a2e3-4882-9513-5d39d513451a-kube-api-access-vhb4d\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.518661 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-scripts\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.518763 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-public-tls-certs\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.523127 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-combined-ca-bundle\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.523457 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-config-data\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.523587 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-scripts\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.523687 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-public-tls-certs\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc 
kubenswrapper[4916]: I1203 20:21:38.523975 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-internal-tls-certs\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.544210 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhb4d\" (UniqueName: \"kubernetes.io/projected/6dbf29e4-a2e3-4882-9513-5d39d513451a-kube-api-access-vhb4d\") pod \"aodh-0\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " pod="openstack/aodh-0" Dec 03 20:21:38 crc kubenswrapper[4916]: I1203 20:21:38.624862 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 03 20:21:39 crc kubenswrapper[4916]: I1203 20:21:39.109560 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 03 20:21:39 crc kubenswrapper[4916]: W1203 20:21:39.121803 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6dbf29e4_a2e3_4882_9513_5d39d513451a.slice/crio-30c93c027738cf0058100870b58c3e6458404b44bba6a0eb671b2ff7fd5c5e67 WatchSource:0}: Error finding container 30c93c027738cf0058100870b58c3e6458404b44bba6a0eb671b2ff7fd5c5e67: Status 404 returned error can't find the container with id 30c93c027738cf0058100870b58c3e6458404b44bba6a0eb671b2ff7fd5c5e67 Dec 03 20:21:39 crc kubenswrapper[4916]: I1203 20:21:39.184677 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"6dbf29e4-a2e3-4882-9513-5d39d513451a","Type":"ContainerStarted","Data":"30c93c027738cf0058100870b58c3e6458404b44bba6a0eb671b2ff7fd5c5e67"} Dec 03 20:21:40 crc kubenswrapper[4916]: I1203 20:21:40.198637 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"6dbf29e4-a2e3-4882-9513-5d39d513451a","Type":"ContainerStarted","Data":"b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32"} Dec 03 20:21:40 crc kubenswrapper[4916]: I1203 20:21:40.478622 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:21:40 crc kubenswrapper[4916]: E1203 20:21:40.478930 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:21:41 crc kubenswrapper[4916]: I1203 20:21:41.213002 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"6dbf29e4-a2e3-4882-9513-5d39d513451a","Type":"ContainerStarted","Data":"63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188"} Dec 03 20:21:42 crc kubenswrapper[4916]: I1203 20:21:42.223973 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"6dbf29e4-a2e3-4882-9513-5d39d513451a","Type":"ContainerStarted","Data":"66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779"} Dec 03 20:21:43 crc kubenswrapper[4916]: I1203 20:21:43.241241 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"6dbf29e4-a2e3-4882-9513-5d39d513451a","Type":"ContainerStarted","Data":"e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7"}
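The machine-config-daemon-q4hms errors interleaved above show CrashLoopBackOff gating restarts: the kubelet keeps answering "back-off 5m0s" until the window expires, and the container is only recreated once it does (as finally happens at 20:22:49 below). A sketch of the doubling-with-cap policy behind that message, assuming the kubelet's default 10s initial delay and 5m cap as parameters:

    package main

    import (
    	"fmt"
    	"time"
    )

    // backoff returns the wait before the next restart attempt: the delay
    // roughly doubles per crash from an initial 10s up to a 5m ceiling
    // (a schematic of the policy, not the kubelet's actual implementation).
    func backoff(restarts int) time.Duration {
    	d := 10 * time.Second
    	for i := 0; i < restarts; i++ {
    		d *= 2
    		if d >= 5*time.Minute {
    			return 5 * time.Minute
    		}
    	}
    	return d
    }

    func main() {
    	for r := 0; r <= 6; r++ {
    		fmt.Printf("restart %d -> wait %v\n", r, backoff(r))
    	}
    	// restart 5 -> wait 5m0s: the cap quoted in "back-off 5m0s restarting
    	// failed container=machine-config-daemon" above
    }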
event={"ID":"6dbf29e4-a2e3-4882-9513-5d39d513451a","Type":"ContainerStarted","Data":"e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7"} Dec 03 20:21:43 crc kubenswrapper[4916]: I1203 20:21:43.282823 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.461159361 podStartE2EDuration="5.282797988s" podCreationTimestamp="2025-12-03 20:21:38 +0000 UTC" firstStartedPulling="2025-12-03 20:21:39.126819606 +0000 UTC m=+3115.089629872" lastFinishedPulling="2025-12-03 20:21:41.948458212 +0000 UTC m=+3117.911268499" observedRunningTime="2025-12-03 20:21:43.268145011 +0000 UTC m=+3119.230955297" watchObservedRunningTime="2025-12-03 20:21:43.282797988 +0000 UTC m=+3119.245608284" Dec 03 20:21:43 crc kubenswrapper[4916]: I1203 20:21:43.543686 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 20:21:53 crc kubenswrapper[4916]: I1203 20:21:53.478509 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:21:53 crc kubenswrapper[4916]: E1203 20:21:53.479519 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:21:54 crc kubenswrapper[4916]: I1203 20:21:54.157469 4916 scope.go:117] "RemoveContainer" containerID="8b7296411df7f7a1613eeedb24af870dab599a139e6e1d3982b2b5f1b2a1174e" Dec 03 20:21:54 crc kubenswrapper[4916]: I1203 20:21:54.222560 4916 scope.go:117] "RemoveContainer" containerID="8e28f93a31ba2ae0c2e7976e10fea1e4ff0a6458677c52da8a6b2eff3d7f1353" Dec 03 20:21:54 crc kubenswrapper[4916]: I1203 20:21:54.278821 4916 scope.go:117] "RemoveContainer" containerID="5d372511a548b2944c09a9d53100106dc712d9349ad684faca7ebc20d97abfb4" Dec 03 20:22:07 crc kubenswrapper[4916]: I1203 20:22:07.478109 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:22:07 crc kubenswrapper[4916]: E1203 20:22:07.478788 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:22:21 crc kubenswrapper[4916]: I1203 20:22:21.478634 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:22:21 crc kubenswrapper[4916]: E1203 20:22:21.479653 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:22:36 crc kubenswrapper[4916]: I1203 20:22:36.479690 4916 
scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:22:36 crc kubenswrapper[4916]: E1203 20:22:36.480773 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:22:49 crc kubenswrapper[4916]: I1203 20:22:49.479080 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:22:50 crc kubenswrapper[4916]: I1203 20:22:50.098907 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"ef7f0108f788da434d859e9f496d32e519da7456f6633c29dee92073a20401b4"} Dec 03 20:24:44 crc kubenswrapper[4916]: I1203 20:24:44.809914 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_6f4635b6-2410-4d5f-a7c9-3cf0a04739f7/manager/0.log" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.223643 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c"] Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.227812 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.231427 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.235432 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c"] Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.384611 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c\" (UID: \"b22be183-8473-4ebc-a31b-0e219064f8b2\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.384665 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c\" (UID: \"b22be183-8473-4ebc-a31b-0e219064f8b2\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.384689 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xv5q\" (UniqueName: \"kubernetes.io/projected/b22be183-8473-4ebc-a31b-0e219064f8b2-kube-api-access-2xv5q\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c\" (UID: 
\"b22be183-8473-4ebc-a31b-0e219064f8b2\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.486763 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c\" (UID: \"b22be183-8473-4ebc-a31b-0e219064f8b2\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.486824 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c\" (UID: \"b22be183-8473-4ebc-a31b-0e219064f8b2\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.486867 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xv5q\" (UniqueName: \"kubernetes.io/projected/b22be183-8473-4ebc-a31b-0e219064f8b2-kube-api-access-2xv5q\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c\" (UID: \"b22be183-8473-4ebc-a31b-0e219064f8b2\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.487230 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c\" (UID: \"b22be183-8473-4ebc-a31b-0e219064f8b2\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.487267 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c\" (UID: \"b22be183-8473-4ebc-a31b-0e219064f8b2\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.520992 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xv5q\" (UniqueName: \"kubernetes.io/projected/b22be183-8473-4ebc-a31b-0e219064f8b2-kube-api-access-2xv5q\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c\" (UID: \"b22be183-8473-4ebc-a31b-0e219064f8b2\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" Dec 03 20:24:59 crc kubenswrapper[4916]: I1203 20:24:59.553827 4916 util.go:30] "No sandbox for pod can be found. 
Dec 03 20:25:00 crc kubenswrapper[4916]: I1203 20:25:00.029521 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c"] Dec 03 20:25:00 crc kubenswrapper[4916]: I1203 20:25:00.492977 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" event={"ID":"b22be183-8473-4ebc-a31b-0e219064f8b2","Type":"ContainerStarted","Data":"4bc2e259a24b1034945fd7de8b2f260b09bb4807ed03924e35a7280e851a2f74"} Dec 03 20:25:00 crc kubenswrapper[4916]: I1203 20:25:00.494803 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" event={"ID":"b22be183-8473-4ebc-a31b-0e219064f8b2","Type":"ContainerStarted","Data":"2b980134c4824796901315a1d425af4bd7aeb15e5c2de3e77d22ed886df3983a"} Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.496557 4916 generic.go:334] "Generic (PLEG): container finished" podID="b22be183-8473-4ebc-a31b-0e219064f8b2" containerID="4bc2e259a24b1034945fd7de8b2f260b09bb4807ed03924e35a7280e851a2f74" exitCode=0 Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.497655 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" event={"ID":"b22be183-8473-4ebc-a31b-0e219064f8b2","Type":"ContainerDied","Data":"4bc2e259a24b1034945fd7de8b2f260b09bb4807ed03924e35a7280e851a2f74"} Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.584027 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t8vhw"] Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.588004 4916 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.593480 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t8vhw"]
Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.650061 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-utilities\") pod \"redhat-operators-t8vhw\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") " pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.650171 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5mct\" (UniqueName: \"kubernetes.io/projected/4571c6a8-ef9c-43b9-abb4-81e89651e5af-kube-api-access-d5mct\") pod \"redhat-operators-t8vhw\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") " pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.650255 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-catalog-content\") pod \"redhat-operators-t8vhw\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") " pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.750869 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-catalog-content\") pod \"redhat-operators-t8vhw\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") " pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.750958 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-utilities\") pod \"redhat-operators-t8vhw\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") " pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.751021 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5mct\" (UniqueName: \"kubernetes.io/projected/4571c6a8-ef9c-43b9-abb4-81e89651e5af-kube-api-access-d5mct\") pod \"redhat-operators-t8vhw\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") " pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.751427 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-catalog-content\") pod \"redhat-operators-t8vhw\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") " pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.751462 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-utilities\") pod \"redhat-operators-t8vhw\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") " pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.768191 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5mct\" (UniqueName: \"kubernetes.io/projected/4571c6a8-ef9c-43b9-abb4-81e89651e5af-kube-api-access-d5mct\") pod \"redhat-operators-t8vhw\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") " pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:01 crc kubenswrapper[4916]: I1203 20:25:01.927112 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:02 crc kubenswrapper[4916]: W1203 20:25:02.390067 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4571c6a8_ef9c_43b9_abb4_81e89651e5af.slice/crio-0ce06c8a411e968408c76903c7493a5f0a6ef6bd294c16b07bfbccbddaf3cfb5 WatchSource:0}: Error finding container 0ce06c8a411e968408c76903c7493a5f0a6ef6bd294c16b07bfbccbddaf3cfb5: Status 404 returned error can't find the container with id 0ce06c8a411e968408c76903c7493a5f0a6ef6bd294c16b07bfbccbddaf3cfb5
Dec 03 20:25:02 crc kubenswrapper[4916]: I1203 20:25:02.404742 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t8vhw"]
Dec 03 20:25:02 crc kubenswrapper[4916]: I1203 20:25:02.505967 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8vhw" event={"ID":"4571c6a8-ef9c-43b9-abb4-81e89651e5af","Type":"ContainerStarted","Data":"0ce06c8a411e968408c76903c7493a5f0a6ef6bd294c16b07bfbccbddaf3cfb5"}
Dec 03 20:25:03 crc kubenswrapper[4916]: I1203 20:25:03.516935 4916 generic.go:334] "Generic (PLEG): container finished" podID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerID="89e29e7eb7db0f1121a0837714f140942a7aaf98cf26fd1bbbc845aec751bc20" exitCode=0
Dec 03 20:25:03 crc kubenswrapper[4916]: I1203 20:25:03.517008 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8vhw" event={"ID":"4571c6a8-ef9c-43b9-abb4-81e89651e5af","Type":"ContainerDied","Data":"89e29e7eb7db0f1121a0837714f140942a7aaf98cf26fd1bbbc845aec751bc20"}
Dec 03 20:25:03 crc kubenswrapper[4916]: I1203 20:25:03.519815 4916 generic.go:334] "Generic (PLEG): container finished" podID="b22be183-8473-4ebc-a31b-0e219064f8b2" containerID="463b2ba7f53f5508639250774ea53161a3316a1d20e20e603dd28f0c51786c4e" exitCode=0
Dec 03 20:25:03 crc kubenswrapper[4916]: I1203 20:25:03.519860 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" event={"ID":"b22be183-8473-4ebc-a31b-0e219064f8b2","Type":"ContainerDied","Data":"463b2ba7f53f5508639250774ea53161a3316a1d20e20e603dd28f0c51786c4e"}
Dec 03 20:25:04 crc kubenswrapper[4916]: I1203 20:25:04.538532 4916 generic.go:334] "Generic (PLEG): container finished" podID="b22be183-8473-4ebc-a31b-0e219064f8b2" containerID="674bdd4a3b8954dc4d0317b613fec0cc334e1267b56c8b29b8cf51ef254c12c0" exitCode=0
Dec 03 20:25:04 crc kubenswrapper[4916]: I1203 20:25:04.538981 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" event={"ID":"b22be183-8473-4ebc-a31b-0e219064f8b2","Type":"ContainerDied","Data":"674bdd4a3b8954dc4d0317b613fec0cc334e1267b56c8b29b8cf51ef254c12c0"}
Dec 03 20:25:05 crc kubenswrapper[4916]: I1203 20:25:05.558955 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8vhw" event={"ID":"4571c6a8-ef9c-43b9-abb4-81e89651e5af","Type":"ContainerStarted","Data":"de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f"}
Dec 03 20:25:05 crc kubenswrapper[4916]: I1203 20:25:05.976113 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c"
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.138514 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xv5q\" (UniqueName: \"kubernetes.io/projected/b22be183-8473-4ebc-a31b-0e219064f8b2-kube-api-access-2xv5q\") pod \"b22be183-8473-4ebc-a31b-0e219064f8b2\" (UID: \"b22be183-8473-4ebc-a31b-0e219064f8b2\") "
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.139098 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-util\") pod \"b22be183-8473-4ebc-a31b-0e219064f8b2\" (UID: \"b22be183-8473-4ebc-a31b-0e219064f8b2\") "
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.139207 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-bundle\") pod \"b22be183-8473-4ebc-a31b-0e219064f8b2\" (UID: \"b22be183-8473-4ebc-a31b-0e219064f8b2\") "
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.144268 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-bundle" (OuterVolumeSpecName: "bundle") pod "b22be183-8473-4ebc-a31b-0e219064f8b2" (UID: "b22be183-8473-4ebc-a31b-0e219064f8b2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.148082 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b22be183-8473-4ebc-a31b-0e219064f8b2-kube-api-access-2xv5q" (OuterVolumeSpecName: "kube-api-access-2xv5q") pod "b22be183-8473-4ebc-a31b-0e219064f8b2" (UID: "b22be183-8473-4ebc-a31b-0e219064f8b2"). InnerVolumeSpecName "kube-api-access-2xv5q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.151426 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-util" (OuterVolumeSpecName: "util") pod "b22be183-8473-4ebc-a31b-0e219064f8b2" (UID: "b22be183-8473-4ebc-a31b-0e219064f8b2"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.241718 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xv5q\" (UniqueName: \"kubernetes.io/projected/b22be183-8473-4ebc-a31b-0e219064f8b2-kube-api-access-2xv5q\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.241760 4916 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-util\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.241771 4916 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b22be183-8473-4ebc-a31b-0e219064f8b2-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.570387 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c" event={"ID":"b22be183-8473-4ebc-a31b-0e219064f8b2","Type":"ContainerDied","Data":"2b980134c4824796901315a1d425af4bd7aeb15e5c2de3e77d22ed886df3983a"}
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.570427 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b980134c4824796901315a1d425af4bd7aeb15e5c2de3e77d22ed886df3983a"
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.570425 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c"
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.573710 4916 generic.go:334] "Generic (PLEG): container finished" podID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerID="de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f" exitCode=0
Dec 03 20:25:06 crc kubenswrapper[4916]: I1203 20:25:06.573762 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8vhw" event={"ID":"4571c6a8-ef9c-43b9-abb4-81e89651e5af","Type":"ContainerDied","Data":"de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f"}
Dec 03 20:25:08 crc kubenswrapper[4916]: I1203 20:25:08.598045 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8vhw" event={"ID":"4571c6a8-ef9c-43b9-abb4-81e89651e5af","Type":"ContainerStarted","Data":"4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7"}
Dec 03 20:25:08 crc kubenswrapper[4916]: I1203 20:25:08.630134 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t8vhw" podStartSLOduration=3.027127023 podStartE2EDuration="7.63010211s" podCreationTimestamp="2025-12-03 20:25:01 +0000 UTC" firstStartedPulling="2025-12-03 20:25:03.519173998 +0000 UTC m=+3319.481984274" lastFinishedPulling="2025-12-03 20:25:08.122149085 +0000 UTC m=+3324.084959361" observedRunningTime="2025-12-03 20:25:08.624192444 +0000 UTC m=+3324.587002720" watchObservedRunningTime="2025-12-03 20:25:08.63010211 +0000 UTC m=+3324.592912416"
Dec 03 20:25:11 crc kubenswrapper[4916]: I1203 20:25:11.927844 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:11 crc kubenswrapper[4916]: I1203 20:25:11.929213 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:13 crc kubenswrapper[4916]: I1203 20:25:13.019010 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t8vhw" podUID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerName="registry-server" probeResult="failure" output=<
Dec 03 20:25:13 crc kubenswrapper[4916]: timeout: failed to connect service ":50051" within 1s
Dec 03 20:25:13 crc kubenswrapper[4916]: >
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.158925 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.160544 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.821635 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j"]
Dec 03 20:25:16 crc kubenswrapper[4916]: E1203 20:25:16.822146 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22be183-8473-4ebc-a31b-0e219064f8b2" containerName="pull"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.822168 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22be183-8473-4ebc-a31b-0e219064f8b2" containerName="pull"
Dec 03 20:25:16 crc kubenswrapper[4916]: E1203 20:25:16.822188 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22be183-8473-4ebc-a31b-0e219064f8b2" containerName="util"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.822198 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22be183-8473-4ebc-a31b-0e219064f8b2" containerName="util"
Dec 03 20:25:16 crc kubenswrapper[4916]: E1203 20:25:16.822235 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22be183-8473-4ebc-a31b-0e219064f8b2" containerName="extract"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.822244 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22be183-8473-4ebc-a31b-0e219064f8b2" containerName="extract"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.822602 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22be183-8473-4ebc-a31b-0e219064f8b2" containerName="extract"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.823439 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.826365 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.826365 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-7v2s6"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.826588 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.829244 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt"]
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.830373 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.831914 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.832243 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-grmdn"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.850397 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j"]
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.858154 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5"]
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.859304 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.865985 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt"]
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.877007 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b9a62187-a514-4067-8eae-ed64cd6daa76-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt\" (UID: \"b9a62187-a514-4067-8eae-ed64cd6daa76\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.877050 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b910b03-47fc-4dff-87ca-eed3318f67e5-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5\" (UID: \"8b910b03-47fc-4dff-87ca-eed3318f67e5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.877067 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b910b03-47fc-4dff-87ca-eed3318f67e5-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5\" (UID: \"8b910b03-47fc-4dff-87ca-eed3318f67e5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.877111 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqs7s\" (UniqueName: \"kubernetes.io/projected/e7cb08b3-e6d4-4165-ba93-b35ed50108c7-kube-api-access-cqs7s\") pod \"obo-prometheus-operator-668cf9dfbb-z8g4j\" (UID: \"e7cb08b3-e6d4-4165-ba93-b35ed50108c7\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.877128 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b9a62187-a514-4067-8eae-ed64cd6daa76-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt\" (UID: \"b9a62187-a514-4067-8eae-ed64cd6daa76\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.910834 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5"]
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.982278 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b9a62187-a514-4067-8eae-ed64cd6daa76-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt\" (UID: \"b9a62187-a514-4067-8eae-ed64cd6daa76\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.982551 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b910b03-47fc-4dff-87ca-eed3318f67e5-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5\" (UID: \"8b910b03-47fc-4dff-87ca-eed3318f67e5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.982652 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b910b03-47fc-4dff-87ca-eed3318f67e5-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5\" (UID: \"8b910b03-47fc-4dff-87ca-eed3318f67e5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.982770 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqs7s\" (UniqueName: \"kubernetes.io/projected/e7cb08b3-e6d4-4165-ba93-b35ed50108c7-kube-api-access-cqs7s\") pod \"obo-prometheus-operator-668cf9dfbb-z8g4j\" (UID: \"e7cb08b3-e6d4-4165-ba93-b35ed50108c7\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.982846 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b9a62187-a514-4067-8eae-ed64cd6daa76-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt\" (UID: \"b9a62187-a514-4067-8eae-ed64cd6daa76\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.988979 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8b910b03-47fc-4dff-87ca-eed3318f67e5-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5\" (UID: \"8b910b03-47fc-4dff-87ca-eed3318f67e5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.989116 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8b910b03-47fc-4dff-87ca-eed3318f67e5-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5\" (UID: \"8b910b03-47fc-4dff-87ca-eed3318f67e5\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.992552 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b9a62187-a514-4067-8eae-ed64cd6daa76-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt\" (UID: \"b9a62187-a514-4067-8eae-ed64cd6daa76\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt"
Dec 03 20:25:16 crc kubenswrapper[4916]: I1203 20:25:16.992665 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b9a62187-a514-4067-8eae-ed64cd6daa76-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt\" (UID: \"b9a62187-a514-4067-8eae-ed64cd6daa76\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.002414 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqs7s\" (UniqueName: \"kubernetes.io/projected/e7cb08b3-e6d4-4165-ba93-b35ed50108c7-kube-api-access-cqs7s\") pod \"obo-prometheus-operator-668cf9dfbb-z8g4j\" (UID: \"e7cb08b3-e6d4-4165-ba93-b35ed50108c7\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.154363 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.158449 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.177094 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.353163 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-gl8qq"]
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.355885 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.358598 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-8x66c"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.359000 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.473951 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8w9g\" (UniqueName: \"kubernetes.io/projected/02c05089-1da5-466d-ae93-bd7b99d6cba4-kube-api-access-x8w9g\") pod \"observability-operator-d8bb48f5d-gl8qq\" (UID: \"02c05089-1da5-466d-ae93-bd7b99d6cba4\") " pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.474004 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/02c05089-1da5-466d-ae93-bd7b99d6cba4-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-gl8qq\" (UID: \"02c05089-1da5-466d-ae93-bd7b99d6cba4\") " pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.475490 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-gl8qq"]
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.575996 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8w9g\" (UniqueName: \"kubernetes.io/projected/02c05089-1da5-466d-ae93-bd7b99d6cba4-kube-api-access-x8w9g\") pod \"observability-operator-d8bb48f5d-gl8qq\" (UID: \"02c05089-1da5-466d-ae93-bd7b99d6cba4\") " pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.576080 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/02c05089-1da5-466d-ae93-bd7b99d6cba4-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-gl8qq\" (UID: \"02c05089-1da5-466d-ae93-bd7b99d6cba4\") " pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.587277 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/02c05089-1da5-466d-ae93-bd7b99d6cba4-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-gl8qq\" (UID: \"02c05089-1da5-466d-ae93-bd7b99d6cba4\") " pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.625640 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-5cbf2"]
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.627077 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-5cbf2"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.628506 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8w9g\" (UniqueName: \"kubernetes.io/projected/02c05089-1da5-466d-ae93-bd7b99d6cba4-kube-api-access-x8w9g\") pod \"observability-operator-d8bb48f5d-gl8qq\" (UID: \"02c05089-1da5-466d-ae93-bd7b99d6cba4\") " pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.631044 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-4m4w7"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.670685 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-5cbf2"]
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.681743 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.766827 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j"]
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.785707 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/4122a230-b1b6-4725-b02a-a0829dfa4f3e-openshift-service-ca\") pod \"perses-operator-5446b9c989-5cbf2\" (UID: \"4122a230-b1b6-4725-b02a-a0829dfa4f3e\") " pod="openshift-operators/perses-operator-5446b9c989-5cbf2"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.785869 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghftq\" (UniqueName: \"kubernetes.io/projected/4122a230-b1b6-4725-b02a-a0829dfa4f3e-kube-api-access-ghftq\") pod \"perses-operator-5446b9c989-5cbf2\" (UID: \"4122a230-b1b6-4725-b02a-a0829dfa4f3e\") " pod="openshift-operators/perses-operator-5446b9c989-5cbf2"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.796299 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt"]
Dec 03 20:25:17 crc kubenswrapper[4916]: W1203 20:25:17.803879 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9a62187_a514_4067_8eae_ed64cd6daa76.slice/crio-055d3c4492214ddb14217ef720ef6bf8155ee2a664b399229fc48c9293142910 WatchSource:0}: Error finding container 055d3c4492214ddb14217ef720ef6bf8155ee2a664b399229fc48c9293142910: Status 404 returned error can't find the container with id 055d3c4492214ddb14217ef720ef6bf8155ee2a664b399229fc48c9293142910
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.887267 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghftq\" (UniqueName: \"kubernetes.io/projected/4122a230-b1b6-4725-b02a-a0829dfa4f3e-kube-api-access-ghftq\") pod \"perses-operator-5446b9c989-5cbf2\" (UID: \"4122a230-b1b6-4725-b02a-a0829dfa4f3e\") " pod="openshift-operators/perses-operator-5446b9c989-5cbf2"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.887374 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/4122a230-b1b6-4725-b02a-a0829dfa4f3e-openshift-service-ca\") pod \"perses-operator-5446b9c989-5cbf2\" (UID: \"4122a230-b1b6-4725-b02a-a0829dfa4f3e\") " pod="openshift-operators/perses-operator-5446b9c989-5cbf2"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.888346 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/4122a230-b1b6-4725-b02a-a0829dfa4f3e-openshift-service-ca\") pod \"perses-operator-5446b9c989-5cbf2\" (UID: \"4122a230-b1b6-4725-b02a-a0829dfa4f3e\") " pod="openshift-operators/perses-operator-5446b9c989-5cbf2"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.911477 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghftq\" (UniqueName: \"kubernetes.io/projected/4122a230-b1b6-4725-b02a-a0829dfa4f3e-kube-api-access-ghftq\") pod \"perses-operator-5446b9c989-5cbf2\" (UID: \"4122a230-b1b6-4725-b02a-a0829dfa4f3e\") " pod="openshift-operators/perses-operator-5446b9c989-5cbf2"
Dec 03 20:25:17 crc kubenswrapper[4916]: I1203 20:25:17.992752 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-5cbf2"
Dec 03 20:25:18 crc kubenswrapper[4916]: I1203 20:25:18.045723 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5"]
Dec 03 20:25:18 crc kubenswrapper[4916]: W1203 20:25:18.072522 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b910b03_47fc_4dff_87ca_eed3318f67e5.slice/crio-ebd17b6c6c684ce63cb8c17061a0ef1ab7c15faadf1c5c3fc9b1b3e63c0b6341 WatchSource:0}: Error finding container ebd17b6c6c684ce63cb8c17061a0ef1ab7c15faadf1c5c3fc9b1b3e63c0b6341: Status 404 returned error can't find the container with id ebd17b6c6c684ce63cb8c17061a0ef1ab7c15faadf1c5c3fc9b1b3e63c0b6341
Dec 03 20:25:18 crc kubenswrapper[4916]: I1203 20:25:18.231143 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-gl8qq"]
Dec 03 20:25:18 crc kubenswrapper[4916]: I1203 20:25:18.556311 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-5cbf2"]
Dec 03 20:25:18 crc kubenswrapper[4916]: W1203 20:25:18.567151 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4122a230_b1b6_4725_b02a_a0829dfa4f3e.slice/crio-6fc56e1f49912c8880a6cf01f303a6d4a554976e29cc389232c746a84f280d8a WatchSource:0}: Error finding container 6fc56e1f49912c8880a6cf01f303a6d4a554976e29cc389232c746a84f280d8a: Status 404 returned error can't find the container with id 6fc56e1f49912c8880a6cf01f303a6d4a554976e29cc389232c746a84f280d8a
Dec 03 20:25:18 crc kubenswrapper[4916]: I1203 20:25:18.706187 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5" event={"ID":"8b910b03-47fc-4dff-87ca-eed3318f67e5","Type":"ContainerStarted","Data":"ebd17b6c6c684ce63cb8c17061a0ef1ab7c15faadf1c5c3fc9b1b3e63c0b6341"}
Dec 03 20:25:18 crc kubenswrapper[4916]: I1203 20:25:18.707608 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-5cbf2" event={"ID":"4122a230-b1b6-4725-b02a-a0829dfa4f3e","Type":"ContainerStarted","Data":"6fc56e1f49912c8880a6cf01f303a6d4a554976e29cc389232c746a84f280d8a"}
Dec 03 20:25:18 crc kubenswrapper[4916]: I1203 20:25:18.708722 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq" event={"ID":"02c05089-1da5-466d-ae93-bd7b99d6cba4","Type":"ContainerStarted","Data":"3a5137b53249a43652adb98666b7ec71a12b237bcc48abafd795d56a892229b4"}
Dec 03 20:25:18 crc kubenswrapper[4916]: I1203 20:25:18.710147 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt" event={"ID":"b9a62187-a514-4067-8eae-ed64cd6daa76","Type":"ContainerStarted","Data":"055d3c4492214ddb14217ef720ef6bf8155ee2a664b399229fc48c9293142910"}
Dec 03 20:25:18 crc kubenswrapper[4916]: I1203 20:25:18.711255 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j" event={"ID":"e7cb08b3-e6d4-4165-ba93-b35ed50108c7","Type":"ContainerStarted","Data":"2272753dc4389b29d798855876ea0bb69670204c3baac021d157b61ed7f852c9"}
Dec 03 20:25:23 crc kubenswrapper[4916]: I1203 20:25:23.007712 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t8vhw" podUID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerName="registry-server" probeResult="failure" output=<
Dec 03 20:25:23 crc kubenswrapper[4916]: timeout: failed to connect service ":50051" within 1s
Dec 03 20:25:23 crc kubenswrapper[4916]: >
Dec 03 20:25:24 crc kubenswrapper[4916]: I1203 20:25:24.713031 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2hg2v"]
Dec 03 20:25:24 crc kubenswrapper[4916]: I1203 20:25:24.715424 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2hg2v"
Dec 03 20:25:24 crc kubenswrapper[4916]: I1203 20:25:24.725470 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2hg2v"]
Dec 03 20:25:24 crc kubenswrapper[4916]: I1203 20:25:24.829833 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c854\" (UniqueName: \"kubernetes.io/projected/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-kube-api-access-2c854\") pod \"community-operators-2hg2v\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " pod="openshift-marketplace/community-operators-2hg2v"
Dec 03 20:25:24 crc kubenswrapper[4916]: I1203 20:25:24.829936 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-catalog-content\") pod \"community-operators-2hg2v\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " pod="openshift-marketplace/community-operators-2hg2v"
Dec 03 20:25:24 crc kubenswrapper[4916]: I1203 20:25:24.829958 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-utilities\") pod \"community-operators-2hg2v\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " pod="openshift-marketplace/community-operators-2hg2v"
Dec 03 20:25:24 crc kubenswrapper[4916]: I1203 20:25:24.931382 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-catalog-content\") pod \"community-operators-2hg2v\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " pod="openshift-marketplace/community-operators-2hg2v"
Dec 03 20:25:24 crc kubenswrapper[4916]: I1203 20:25:24.931635 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-utilities\") pod \"community-operators-2hg2v\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " pod="openshift-marketplace/community-operators-2hg2v"
Dec 03 20:25:24 crc kubenswrapper[4916]: I1203 20:25:24.931751 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c854\" (UniqueName: \"kubernetes.io/projected/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-kube-api-access-2c854\") pod \"community-operators-2hg2v\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " pod="openshift-marketplace/community-operators-2hg2v"
Dec 03 20:25:24 crc kubenswrapper[4916]: I1203 20:25:24.931856 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-catalog-content\") pod \"community-operators-2hg2v\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " pod="openshift-marketplace/community-operators-2hg2v"
Dec 03 20:25:24 crc kubenswrapper[4916]: I1203 20:25:24.932023 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-utilities\") pod \"community-operators-2hg2v\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " pod="openshift-marketplace/community-operators-2hg2v"
Dec 03 20:25:25 crc kubenswrapper[4916]: I1203 20:25:25.262118 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c854\" (UniqueName: \"kubernetes.io/projected/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-kube-api-access-2c854\") pod \"community-operators-2hg2v\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " pod="openshift-marketplace/community-operators-2hg2v"
Dec 03 20:25:25 crc kubenswrapper[4916]: I1203 20:25:25.346939 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2hg2v"
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.101154 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gk9fl"]
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.103990 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.137519 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk9fl"]
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.180467 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-catalog-content\") pod \"redhat-marketplace-gk9fl\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") " pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.180836 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-utilities\") pod \"redhat-marketplace-gk9fl\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") " pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.181005 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl7d2\" (UniqueName: \"kubernetes.io/projected/30e5172e-f0a1-41d0-8c11-a0565cc93597-kube-api-access-gl7d2\") pod \"redhat-marketplace-gk9fl\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") " pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.282622 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-catalog-content\") pod \"redhat-marketplace-gk9fl\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") " pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.282704 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-utilities\") pod \"redhat-marketplace-gk9fl\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") " pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.282801 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl7d2\" (UniqueName: \"kubernetes.io/projected/30e5172e-f0a1-41d0-8c11-a0565cc93597-kube-api-access-gl7d2\") pod \"redhat-marketplace-gk9fl\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") " pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.284621 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-catalog-content\") pod \"redhat-marketplace-gk9fl\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") " pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.284982 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-utilities\") pod \"redhat-marketplace-gk9fl\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") " pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.300375 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl7d2\" (UniqueName: \"kubernetes.io/projected/30e5172e-f0a1-41d0-8c11-a0565cc93597-kube-api-access-gl7d2\") pod \"redhat-marketplace-gk9fl\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") " pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:27 crc kubenswrapper[4916]: I1203 20:25:27.433387 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:31 crc kubenswrapper[4916]: I1203 20:25:31.976853 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:32 crc kubenswrapper[4916]: I1203 20:25:32.028408 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:33 crc kubenswrapper[4916]: E1203 20:25:33.697203 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3"
Dec 03 20:25:33 crc kubenswrapper[4916]: E1203 20:25:33.697709 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cqs7s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-668cf9dfbb-z8g4j_openshift-operators(e7cb08b3-e6d4-4165-ba93-b35ed50108c7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 03 20:25:33 crc kubenswrapper[4916]: E1203 20:25:33.699075 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j" podUID="e7cb08b3-e6d4-4165-ba93-b35ed50108c7"
Dec 03 20:25:33 crc kubenswrapper[4916]: E1203 20:25:33.768013 4916 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec"
Dec 03 20:25:33 crc kubenswrapper[4916]: E1203 20:25:33.768164 4916 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:43d33f0125e6b990f4a972ac4e952a065d7e72dc1690c6c836963b7341734aec,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5_openshift-operators(8b910b03-47fc-4dff-87ca-eed3318f67e5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 03 20:25:33 crc kubenswrapper[4916]: E1203 20:25:33.769332 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5" podUID="8b910b03-47fc-4dff-87ca-eed3318f67e5"
Dec 03 20:25:33 crc kubenswrapper[4916]: I1203 20:25:33.909778 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t8vhw"]
Dec 03 20:25:33 crc kubenswrapper[4916]: I1203 20:25:33.939000 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-t8vhw" podUID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerName="registry-server" containerID="cri-o://4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7" gracePeriod=2
Dec 03 20:25:33 crc kubenswrapper[4916]: E1203 20:25:33.941305 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:203cf5b9dc1460f09e75f58d8b5cf7df5e57c18c8c6a41c14b5e8977d83263f3\\\"\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j" podUID="e7cb08b3-e6d4-4165-ba93-b35ed50108c7"
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.302446 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2hg2v"]
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.325355 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk9fl"]
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.465279 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.589454 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5mct\" (UniqueName: \"kubernetes.io/projected/4571c6a8-ef9c-43b9-abb4-81e89651e5af-kube-api-access-d5mct\") pod \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") "
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.589532 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-utilities\") pod \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") "
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.589617 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-catalog-content\") pod \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\" (UID: \"4571c6a8-ef9c-43b9-abb4-81e89651e5af\") "
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.591453 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-utilities" (OuterVolumeSpecName: "utilities") pod "4571c6a8-ef9c-43b9-abb4-81e89651e5af" (UID: "4571c6a8-ef9c-43b9-abb4-81e89651e5af"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.613321 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4571c6a8-ef9c-43b9-abb4-81e89651e5af-kube-api-access-d5mct" (OuterVolumeSpecName: "kube-api-access-d5mct") pod "4571c6a8-ef9c-43b9-abb4-81e89651e5af" (UID: "4571c6a8-ef9c-43b9-abb4-81e89651e5af"). InnerVolumeSpecName "kube-api-access-d5mct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.691898 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5mct\" (UniqueName: \"kubernetes.io/projected/4571c6a8-ef9c-43b9-abb4-81e89651e5af-kube-api-access-d5mct\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.692327 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.727311 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4571c6a8-ef9c-43b9-abb4-81e89651e5af" (UID: "4571c6a8-ef9c-43b9-abb4-81e89651e5af"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.794731 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4571c6a8-ef9c-43b9-abb4-81e89651e5af-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.948920 4916 generic.go:334] "Generic (PLEG): container finished" podID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" containerID="7c8b966ed1ce47c29f916a17ba2d951f0a003395da8c4ffc0de4dfbeb10ce984" exitCode=0
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.949062 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hg2v" event={"ID":"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1","Type":"ContainerDied","Data":"7c8b966ed1ce47c29f916a17ba2d951f0a003395da8c4ffc0de4dfbeb10ce984"}
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.949100 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hg2v" event={"ID":"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1","Type":"ContainerStarted","Data":"b723e17f9abfc78c433590c561fd3fa67462b82958f3d031751a18eac7e31520"}
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.950395 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5" event={"ID":"8b910b03-47fc-4dff-87ca-eed3318f67e5","Type":"ContainerStarted","Data":"27c1ca71759451efa67a065586ecdc448b609419d13286c4fd6ee5fab64cb981"}
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.952805 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-5cbf2" event={"ID":"4122a230-b1b6-4725-b02a-a0829dfa4f3e","Type":"ContainerStarted","Data":"40d464f07c0dc7671aeb6720324440cc61554e3d29cdec8f72649a73854a0ce0"}
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.953056 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-5cbf2"
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.955453 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq" event={"ID":"02c05089-1da5-466d-ae93-bd7b99d6cba4","Type":"ContainerStarted","Data":"02900a4b5184d53c0ab0b6211b2e65213a9b7563f488232df6ab9bcc5a5234f0"}
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.955735 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq"
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.959551 4916 generic.go:334] "Generic (PLEG): container finished" podID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerID="4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7" exitCode=0
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.959633 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t8vhw"
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.959665 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8vhw" event={"ID":"4571c6a8-ef9c-43b9-abb4-81e89651e5af","Type":"ContainerDied","Data":"4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7"}
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.959690 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t8vhw" event={"ID":"4571c6a8-ef9c-43b9-abb4-81e89651e5af","Type":"ContainerDied","Data":"0ce06c8a411e968408c76903c7493a5f0a6ef6bd294c16b07bfbccbddaf3cfb5"}
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.959737 4916 scope.go:117] "RemoveContainer" containerID="4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7"
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.961325 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt" event={"ID":"b9a62187-a514-4067-8eae-ed64cd6daa76","Type":"ContainerStarted","Data":"314c553870d4707cffeac0d611d9df980b3786297a0cf02ff03c09a67d44022c"}
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.963438 4916 generic.go:334] "Generic (PLEG): container finished" podID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerID="a57bd223ccd90fff41ed78408bb8918aaca89a6245f5ed5b1f538b1ee3a05ab4" exitCode=0
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.963546 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk9fl" event={"ID":"30e5172e-f0a1-41d0-8c11-a0565cc93597","Type":"ContainerDied","Data":"a57bd223ccd90fff41ed78408bb8918aaca89a6245f5ed5b1f538b1ee3a05ab4"}
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.963653 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk9fl" event={"ID":"30e5172e-f0a1-41d0-8c11-a0565cc93597","Type":"ContainerStarted","Data":"1b87755189ac228bbc468a1ee39fe8fd29d293dd43801c4b3641427580a3a30a"}
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.980233 4916 scope.go:117] "RemoveContainer" containerID="de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f"
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.985207 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5" podStartSLOduration=-9223372017.869583 podStartE2EDuration="18.985193197s" podCreationTimestamp="2025-12-03 20:25:16 +0000 UTC" firstStartedPulling="2025-12-03 20:25:18.091690446 +0000 UTC m=+3334.054500712" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:25:34.983324417 +0000 UTC m=+3350.946134683" watchObservedRunningTime="2025-12-03 20:25:34.985193197 +0000 UTC m=+3350.948003463"
Dec 03 20:25:34 crc kubenswrapper[4916]: I1203 20:25:34.995394 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq"
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.011697 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-5cbf2" podStartSLOduration=2.80438267 podStartE2EDuration="18.011679747s" podCreationTimestamp="2025-12-03 20:25:17 +0000 UTC" firstStartedPulling="2025-12-03 20:25:18.574763924 +0000 UTC m=+3334.537574190" lastFinishedPulling="2025-12-03 20:25:33.782061001 +0000 UTC m=+3349.744871267" observedRunningTime="2025-12-03 20:25:35.007413894 +0000 UTC m=+3350.970224160" watchObservedRunningTime="2025-12-03 20:25:35.011679747 +0000 UTC m=+3350.974490013"
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.032285 4916 scope.go:117] "RemoveContainer" containerID="89e29e7eb7db0f1121a0837714f140942a7aaf98cf26fd1bbbc845aec751bc20"
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.099037 4916 scope.go:117] "RemoveContainer" containerID="4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7"
Dec 03 20:25:35 crc kubenswrapper[4916]: E1203 20:25:35.102038 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7\": container with ID starting with 4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7 not found: ID does not exist" containerID="4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7"
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.102081 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7"} err="failed to get container status \"4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7\": rpc error: code = NotFound desc = could not find container \"4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7\": container with ID starting with 4451fd5a3b22b146a38436f20f932663ba3954196316c51c19eaacdc3c8681b7 not found: ID does not exist"
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.102107 4916 scope.go:117] "RemoveContainer" containerID="de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f"
Dec 03 20:25:35 crc kubenswrapper[4916]: E1203 20:25:35.102329 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f\": container with ID starting with de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f not found: ID does not exist" containerID="de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f"
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.102378 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f"} err="failed to get container status \"de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f\": rpc error: code = NotFound desc = could not find container \"de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f\": container with ID starting with de4828c2e2f52f7c028d4010d3f3ba38770378398e5e8adadc58e856e77e826f not found: ID does not exist"
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.102392 4916 scope.go:117] "RemoveContainer" containerID="89e29e7eb7db0f1121a0837714f140942a7aaf98cf26fd1bbbc845aec751bc20"
Dec 03 20:25:35 crc kubenswrapper[4916]: E1203 20:25:35.102637 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89e29e7eb7db0f1121a0837714f140942a7aaf98cf26fd1bbbc845aec751bc20\": container with ID starting with 89e29e7eb7db0f1121a0837714f140942a7aaf98cf26fd1bbbc845aec751bc20 not found: ID does not exist" containerID="89e29e7eb7db0f1121a0837714f140942a7aaf98cf26fd1bbbc845aec751bc20"
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.102689 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89e29e7eb7db0f1121a0837714f140942a7aaf98cf26fd1bbbc845aec751bc20"} err="failed to get container status \"89e29e7eb7db0f1121a0837714f140942a7aaf98cf26fd1bbbc845aec751bc20\": rpc error: code = NotFound desc = could not find container \"89e29e7eb7db0f1121a0837714f140942a7aaf98cf26fd1bbbc845aec751bc20\": container with ID starting with 89e29e7eb7db0f1121a0837714f140942a7aaf98cf26fd1bbbc845aec751bc20 not found: ID does not exist"
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.149174 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt" podStartSLOduration=3.1783058889999998 podStartE2EDuration="19.149157554s" podCreationTimestamp="2025-12-03 20:25:16 +0000 UTC" firstStartedPulling="2025-12-03 20:25:17.811256668 +0000 UTC m=+3333.774066934" lastFinishedPulling="2025-12-03 20:25:33.782108313 +0000 UTC m=+3349.744918599" observedRunningTime="2025-12-03 20:25:35.0771718 +0000 UTC m=+3351.039982086" watchObservedRunningTime="2025-12-03 20:25:35.149157554 +0000 UTC m=+3351.111967820"
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.179625 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t8vhw"]
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.188623 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-t8vhw"]
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.207302 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-gl8qq" podStartSLOduration=2.646422151 podStartE2EDuration="18.207283761s" podCreationTimestamp="2025-12-03 20:25:17 +0000 UTC" firstStartedPulling="2025-12-03 20:25:18.267187258 +0000 UTC m=+3334.229997524" lastFinishedPulling="2025-12-03 20:25:33.828048868 +0000 UTC m=+3349.790859134" observedRunningTime="2025-12-03 20:25:35.160079383 +0000 UTC m=+3351.122889659" watchObservedRunningTime="2025-12-03 20:25:35.207283761 +0000 UTC m=+3351.170094027"
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.975885 4916 generic.go:334] "Generic (PLEG): container finished" podID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerID="543bc9d422f4f647ccf5621f9d4abc9720e4a6ef61a9de728f62ac59938e8b57" exitCode=0
Dec 03 20:25:35 crc kubenswrapper[4916]: I1203 20:25:35.976001 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk9fl" event={"ID":"30e5172e-f0a1-41d0-8c11-a0565cc93597","Type":"ContainerDied","Data":"543bc9d422f4f647ccf5621f9d4abc9720e4a6ef61a9de728f62ac59938e8b57"}
Dec 03 20:25:36 crc kubenswrapper[4916]: I1203 20:25:36.487177 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" path="/var/lib/kubelet/pods/4571c6a8-ef9c-43b9-abb4-81e89651e5af/volumes"
Dec 03 20:25:36 crc
kubenswrapper[4916]: I1203 20:25:36.997020 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk9fl" event={"ID":"30e5172e-f0a1-41d0-8c11-a0565cc93597","Type":"ContainerStarted","Data":"ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368"} Dec 03 20:25:37 crc kubenswrapper[4916]: I1203 20:25:37.004799 4916 generic.go:334] "Generic (PLEG): container finished" podID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" containerID="3147426ea55eb3babbdd4f5f9a164e929c8e83b4de2139fb013abaf794c3aa39" exitCode=0 Dec 03 20:25:37 crc kubenswrapper[4916]: I1203 20:25:37.005047 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hg2v" event={"ID":"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1","Type":"ContainerDied","Data":"3147426ea55eb3babbdd4f5f9a164e929c8e83b4de2139fb013abaf794c3aa39"} Dec 03 20:25:37 crc kubenswrapper[4916]: I1203 20:25:37.054449 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gk9fl" podStartSLOduration=8.584012365 podStartE2EDuration="10.05440677s" podCreationTimestamp="2025-12-03 20:25:27 +0000 UTC" firstStartedPulling="2025-12-03 20:25:34.965090085 +0000 UTC m=+3350.927900351" lastFinishedPulling="2025-12-03 20:25:36.43548449 +0000 UTC m=+3352.398294756" observedRunningTime="2025-12-03 20:25:37.021937871 +0000 UTC m=+3352.984748137" watchObservedRunningTime="2025-12-03 20:25:37.05440677 +0000 UTC m=+3353.017217036" Dec 03 20:25:37 crc kubenswrapper[4916]: I1203 20:25:37.434806 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gk9fl" Dec 03 20:25:37 crc kubenswrapper[4916]: I1203 20:25:37.434854 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gk9fl" Dec 03 20:25:38 crc kubenswrapper[4916]: I1203 20:25:38.489627 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-gk9fl" podUID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerName="registry-server" probeResult="failure" output=< Dec 03 20:25:38 crc kubenswrapper[4916]: timeout: failed to connect service ":50051" within 1s Dec 03 20:25:38 crc kubenswrapper[4916]: > Dec 03 20:25:39 crc kubenswrapper[4916]: I1203 20:25:39.024697 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hg2v" event={"ID":"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1","Type":"ContainerStarted","Data":"a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59"} Dec 03 20:25:39 crc kubenswrapper[4916]: I1203 20:25:39.044307 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2hg2v" podStartSLOduration=12.273165496 podStartE2EDuration="15.044292657s" podCreationTimestamp="2025-12-03 20:25:24 +0000 UTC" firstStartedPulling="2025-12-03 20:25:34.954140265 +0000 UTC m=+3350.916950531" lastFinishedPulling="2025-12-03 20:25:37.725267416 +0000 UTC m=+3353.688077692" observedRunningTime="2025-12-03 20:25:39.037692892 +0000 UTC m=+3355.000503158" watchObservedRunningTime="2025-12-03 20:25:39.044292657 +0000 UTC m=+3355.007102923" Dec 03 20:25:44 crc kubenswrapper[4916]: I1203 20:25:44.031820 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 03 20:25:44 crc kubenswrapper[4916]: I1203 20:25:44.032830 4916 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/aodh-0" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-api" containerID="cri-o://b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32" gracePeriod=30 Dec 03 20:25:44 crc kubenswrapper[4916]: I1203 20:25:44.032909 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-listener" containerID="cri-o://e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7" gracePeriod=30 Dec 03 20:25:44 crc kubenswrapper[4916]: I1203 20:25:44.032987 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-notifier" containerID="cri-o://66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779" gracePeriod=30 Dec 03 20:25:44 crc kubenswrapper[4916]: I1203 20:25:44.032992 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-evaluator" containerID="cri-o://63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188" gracePeriod=30 Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.095065 4916 generic.go:334] "Generic (PLEG): container finished" podID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerID="63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188" exitCode=0 Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.095521 4916 generic.go:334] "Generic (PLEG): container finished" podID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerID="b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32" exitCode=0 Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.095160 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"6dbf29e4-a2e3-4882-9513-5d39d513451a","Type":"ContainerDied","Data":"63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188"} Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.095616 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"6dbf29e4-a2e3-4882-9513-5d39d513451a","Type":"ContainerDied","Data":"b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32"} Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.305949 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x5ms5"] Dec 03 20:25:45 crc kubenswrapper[4916]: E1203 20:25:45.307256 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerName="registry-server" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.307305 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerName="registry-server" Dec 03 20:25:45 crc kubenswrapper[4916]: E1203 20:25:45.307348 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerName="extract-content" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.307365 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerName="extract-content" Dec 03 20:25:45 crc kubenswrapper[4916]: E1203 20:25:45.307447 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerName="extract-utilities" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.307468 4916 
state_mem.go:107] "Deleted CPUSet assignment" podUID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerName="extract-utilities" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.307988 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="4571c6a8-ef9c-43b9-abb4-81e89651e5af" containerName="registry-server" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.311100 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x5ms5" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.320629 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x5ms5"] Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.347302 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2hg2v" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.347438 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2hg2v" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.403463 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-utilities\") pod \"certified-operators-x5ms5\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") " pod="openshift-marketplace/certified-operators-x5ms5" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.403517 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxp55\" (UniqueName: \"kubernetes.io/projected/a96efff5-5bf8-46f6-a64c-c6db9c40e654-kube-api-access-vxp55\") pod \"certified-operators-x5ms5\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") " pod="openshift-marketplace/certified-operators-x5ms5" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.403734 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-catalog-content\") pod \"certified-operators-x5ms5\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") " pod="openshift-marketplace/certified-operators-x5ms5" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.413860 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2hg2v" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.506382 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-utilities\") pod \"certified-operators-x5ms5\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") " pod="openshift-marketplace/certified-operators-x5ms5" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.506428 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxp55\" (UniqueName: \"kubernetes.io/projected/a96efff5-5bf8-46f6-a64c-c6db9c40e654-kube-api-access-vxp55\") pod \"certified-operators-x5ms5\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") " pod="openshift-marketplace/certified-operators-x5ms5" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.506628 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-catalog-content\") pod \"certified-operators-x5ms5\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") " pod="openshift-marketplace/certified-operators-x5ms5" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.507078 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-catalog-content\") pod \"certified-operators-x5ms5\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") " pod="openshift-marketplace/certified-operators-x5ms5" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.507365 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-utilities\") pod \"certified-operators-x5ms5\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") " pod="openshift-marketplace/certified-operators-x5ms5" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.534876 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxp55\" (UniqueName: \"kubernetes.io/projected/a96efff5-5bf8-46f6-a64c-c6db9c40e654-kube-api-access-vxp55\") pod \"certified-operators-x5ms5\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") " pod="openshift-marketplace/certified-operators-x5ms5" Dec 03 20:25:45 crc kubenswrapper[4916]: I1203 20:25:45.660053 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x5ms5" Dec 03 20:25:46 crc kubenswrapper[4916]: I1203 20:25:46.124471 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x5ms5"] Dec 03 20:25:46 crc kubenswrapper[4916]: I1203 20:25:46.155901 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2hg2v" Dec 03 20:25:46 crc kubenswrapper[4916]: I1203 20:25:46.158445 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:25:46 crc kubenswrapper[4916]: I1203 20:25:46.158495 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:25:47 crc kubenswrapper[4916]: I1203 20:25:47.114681 4916 generic.go:334] "Generic (PLEG): container finished" podID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" containerID="35bb55ada4041c51b529622c1e028ee5b75650859b8af3ab4f4addb5409a7427" exitCode=0 Dec 03 20:25:47 crc kubenswrapper[4916]: I1203 20:25:47.114735 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ms5" event={"ID":"a96efff5-5bf8-46f6-a64c-c6db9c40e654","Type":"ContainerDied","Data":"35bb55ada4041c51b529622c1e028ee5b75650859b8af3ab4f4addb5409a7427"} Dec 03 20:25:47 crc kubenswrapper[4916]: I1203 20:25:47.115076 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ms5" 
event={"ID":"a96efff5-5bf8-46f6-a64c-c6db9c40e654","Type":"ContainerStarted","Data":"0cb97d362000bdf90aa9fa96b46e168d459b07d73985148515dd28350d361355"} Dec 03 20:25:47 crc kubenswrapper[4916]: I1203 20:25:47.493985 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gk9fl" Dec 03 20:25:47 crc kubenswrapper[4916]: I1203 20:25:47.561422 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gk9fl" Dec 03 20:25:47 crc kubenswrapper[4916]: I1203 20:25:47.883847 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2hg2v"] Dec 03 20:25:47 crc kubenswrapper[4916]: I1203 20:25:47.996300 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-5cbf2" Dec 03 20:25:48 crc kubenswrapper[4916]: I1203 20:25:48.125037 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ms5" event={"ID":"a96efff5-5bf8-46f6-a64c-c6db9c40e654","Type":"ContainerStarted","Data":"f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db"} Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.147524 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j" event={"ID":"e7cb08b3-e6d4-4165-ba93-b35ed50108c7","Type":"ContainerStarted","Data":"c4ed89cce933ab835f62d3217e1e260149656df3728e0d6b3f41df0ae5228725"} Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.163289 4916 generic.go:334] "Generic (PLEG): container finished" podID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" containerID="f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db" exitCode=0 Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.163728 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ms5" event={"ID":"a96efff5-5bf8-46f6-a64c-c6db9c40e654","Type":"ContainerDied","Data":"f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db"} Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.174053 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-z8g4j" podStartSLOduration=2.793824068 podStartE2EDuration="33.174031935s" podCreationTimestamp="2025-12-03 20:25:16 +0000 UTC" firstStartedPulling="2025-12-03 20:25:17.747796409 +0000 UTC m=+3333.710606675" lastFinishedPulling="2025-12-03 20:25:48.128004276 +0000 UTC m=+3364.090814542" observedRunningTime="2025-12-03 20:25:49.170398539 +0000 UTC m=+3365.133208805" watchObservedRunningTime="2025-12-03 20:25:49.174031935 +0000 UTC m=+3365.136842201" Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.187157 4916 generic.go:334] "Generic (PLEG): container finished" podID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerID="66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779" exitCode=0 Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.187440 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2hg2v" podUID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" containerName="registry-server" containerID="cri-o://a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59" gracePeriod=2 Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.187842 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/aodh-0" event={"ID":"6dbf29e4-a2e3-4882-9513-5d39d513451a","Type":"ContainerDied","Data":"66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779"} Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.825328 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2hg2v" Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.832397 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.905231 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c854\" (UniqueName: \"kubernetes.io/projected/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-kube-api-access-2c854\") pod \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.905279 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-scripts\") pod \"6dbf29e4-a2e3-4882-9513-5d39d513451a\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.905334 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhb4d\" (UniqueName: \"kubernetes.io/projected/6dbf29e4-a2e3-4882-9513-5d39d513451a-kube-api-access-vhb4d\") pod \"6dbf29e4-a2e3-4882-9513-5d39d513451a\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.905432 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-utilities\") pod \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.905549 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-catalog-content\") pod \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\" (UID: \"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1\") " Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.905641 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-combined-ca-bundle\") pod \"6dbf29e4-a2e3-4882-9513-5d39d513451a\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.905680 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-config-data\") pod \"6dbf29e4-a2e3-4882-9513-5d39d513451a\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.905784 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-internal-tls-certs\") pod \"6dbf29e4-a2e3-4882-9513-5d39d513451a\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.905807 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-public-tls-certs\") pod \"6dbf29e4-a2e3-4882-9513-5d39d513451a\" (UID: \"6dbf29e4-a2e3-4882-9513-5d39d513451a\") " Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.907769 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-utilities" (OuterVolumeSpecName: "utilities") pod "2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" (UID: "2e4ac16d-8e40-4f6e-bf2d-27715ad933d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.912055 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-kube-api-access-2c854" (OuterVolumeSpecName: "kube-api-access-2c854") pod "2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" (UID: "2e4ac16d-8e40-4f6e-bf2d-27715ad933d1"). InnerVolumeSpecName "kube-api-access-2c854". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.921717 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-scripts" (OuterVolumeSpecName: "scripts") pod "6dbf29e4-a2e3-4882-9513-5d39d513451a" (UID: "6dbf29e4-a2e3-4882-9513-5d39d513451a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.921734 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dbf29e4-a2e3-4882-9513-5d39d513451a-kube-api-access-vhb4d" (OuterVolumeSpecName: "kube-api-access-vhb4d") pod "6dbf29e4-a2e3-4882-9513-5d39d513451a" (UID: "6dbf29e4-a2e3-4882-9513-5d39d513451a"). InnerVolumeSpecName "kube-api-access-vhb4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:25:49 crc kubenswrapper[4916]: I1203 20:25:49.975797 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6dbf29e4-a2e3-4882-9513-5d39d513451a" (UID: "6dbf29e4-a2e3-4882-9513-5d39d513451a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.005723 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6dbf29e4-a2e3-4882-9513-5d39d513451a" (UID: "6dbf29e4-a2e3-4882-9513-5d39d513451a"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.007702 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c854\" (UniqueName: \"kubernetes.io/projected/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-kube-api-access-2c854\") on node \"crc\" DevicePath \"\"" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.007724 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.007734 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhb4d\" (UniqueName: \"kubernetes.io/projected/6dbf29e4-a2e3-4882-9513-5d39d513451a-kube-api-access-vhb4d\") on node \"crc\" DevicePath \"\"" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.007742 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.007750 4916 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.007759 4916 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.017889 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" (UID: "2e4ac16d-8e40-4f6e-bf2d-27715ad933d1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.041841 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6dbf29e4-a2e3-4882-9513-5d39d513451a" (UID: "6dbf29e4-a2e3-4882-9513-5d39d513451a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.068813 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-config-data" (OuterVolumeSpecName: "config-data") pod "6dbf29e4-a2e3-4882-9513-5d39d513451a" (UID: "6dbf29e4-a2e3-4882-9513-5d39d513451a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.109432 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.109467 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.109479 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dbf29e4-a2e3-4882-9513-5d39d513451a-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.199047 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ms5" event={"ID":"a96efff5-5bf8-46f6-a64c-c6db9c40e654","Type":"ContainerStarted","Data":"6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04"} Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.202043 4916 generic.go:334] "Generic (PLEG): container finished" podID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerID="e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7" exitCode=0 Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.202165 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"6dbf29e4-a2e3-4882-9513-5d39d513451a","Type":"ContainerDied","Data":"e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7"} Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.202195 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"6dbf29e4-a2e3-4882-9513-5d39d513451a","Type":"ContainerDied","Data":"30c93c027738cf0058100870b58c3e6458404b44bba6a0eb671b2ff7fd5c5e67"} Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.202237 4916 scope.go:117] "RemoveContainer" containerID="e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.202420 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.205861 4916 generic.go:334] "Generic (PLEG): container finished" podID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" containerID="a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59" exitCode=0 Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.205910 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hg2v" event={"ID":"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1","Type":"ContainerDied","Data":"a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59"} Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.205941 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2hg2v" event={"ID":"2e4ac16d-8e40-4f6e-bf2d-27715ad933d1","Type":"ContainerDied","Data":"b723e17f9abfc78c433590c561fd3fa67462b82958f3d031751a18eac7e31520"} Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.206014 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2hg2v" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.235477 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x5ms5" podStartSLOduration=2.7668483630000003 podStartE2EDuration="5.235458902s" podCreationTimestamp="2025-12-03 20:25:45 +0000 UTC" firstStartedPulling="2025-12-03 20:25:47.11686959 +0000 UTC m=+3363.079679856" lastFinishedPulling="2025-12-03 20:25:49.585480129 +0000 UTC m=+3365.548290395" observedRunningTime="2025-12-03 20:25:50.229492994 +0000 UTC m=+3366.192303260" watchObservedRunningTime="2025-12-03 20:25:50.235458902 +0000 UTC m=+3366.198269168" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.239843 4916 scope.go:117] "RemoveContainer" containerID="66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.269678 4916 scope.go:117] "RemoveContainer" containerID="63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.287973 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2hg2v"] Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.303208 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2hg2v"] Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.317037 4916 scope.go:117] "RemoveContainer" containerID="b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.319863 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.347739 4916 scope.go:117] "RemoveContainer" containerID="e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.347871 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk9fl"] Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.348138 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gk9fl" podUID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerName="registry-server" containerID="cri-o://ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368" gracePeriod=2 Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.348243 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7\": container with ID starting with e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7 not found: ID does not exist" containerID="e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.348307 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7"} err="failed to get container status \"e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7\": rpc error: code = NotFound desc = could not find container \"e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7\": container with ID starting with e40b8f138b010f76936457ab233a1f172d10871a4e11cd1b5e7e105f6c9be1d7 not found: ID does not exist" Dec 03 20:25:50 crc 
kubenswrapper[4916]: I1203 20:25:50.348334 4916 scope.go:117] "RemoveContainer" containerID="66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779" Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.351778 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779\": container with ID starting with 66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779 not found: ID does not exist" containerID="66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.351815 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779"} err="failed to get container status \"66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779\": rpc error: code = NotFound desc = could not find container \"66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779\": container with ID starting with 66466c1dbe8712a34d02f920e70e7c621c9d9549ec4b2689d5dd7ac69e65f779 not found: ID does not exist" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.351832 4916 scope.go:117] "RemoveContainer" containerID="63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188" Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.353963 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188\": container with ID starting with 63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188 not found: ID does not exist" containerID="63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.354479 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188"} err="failed to get container status \"63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188\": rpc error: code = NotFound desc = could not find container \"63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188\": container with ID starting with 63148b6e50ab814f5460dc6b86cb691a9e4faba5ab4b21bb2f0afaebf276c188 not found: ID does not exist" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.354600 4916 scope.go:117] "RemoveContainer" containerID="b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32" Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.363748 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32\": container with ID starting with b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32 not found: ID does not exist" containerID="b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.363799 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32"} err="failed to get container status \"b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32\": rpc error: code = NotFound desc = could not find container 
\"b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32\": container with ID starting with b54a3cdbaa91f040fe0ae99c35aebaa0520fb980e99bbd33676557459b94bc32 not found: ID does not exist" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.363825 4916 scope.go:117] "RemoveContainer" containerID="a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.395064 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-0"] Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417059 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.417518 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" containerName="extract-content" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417555 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" containerName="extract-content" Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.417588 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" containerName="extract-utilities" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417597 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" containerName="extract-utilities" Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.417612 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-api" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417618 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-api" Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.417629 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-notifier" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417635 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-notifier" Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.417647 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-listener" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417653 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-listener" Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.417668 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" containerName="registry-server" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417674 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" containerName="registry-server" Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.417682 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-evaluator" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417688 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-evaluator" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417867 4916 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" containerName="registry-server" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417882 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-api" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417895 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-evaluator" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417911 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-notifier" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.417921 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" containerName="aodh-listener" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.419770 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.423120 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-public-svc" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.423150 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.423393 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-internal-svc" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.423453 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.423516 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-cqszc" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.429375 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.468454 4916 scope.go:117] "RemoveContainer" containerID="3147426ea55eb3babbdd4f5f9a164e929c8e83b4de2139fb013abaf794c3aa39" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.498964 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e4ac16d-8e40-4f6e-bf2d-27715ad933d1" path="/var/lib/kubelet/pods/2e4ac16d-8e40-4f6e-bf2d-27715ad933d1/volumes" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.500128 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dbf29e4-a2e3-4882-9513-5d39d513451a" path="/var/lib/kubelet/pods/6dbf29e4-a2e3-4882-9513-5d39d513451a/volumes" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.520656 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-scripts\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.520756 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-public-tls-certs\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0" Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.520780 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-wbjfl\" (UniqueName: \"kubernetes.io/projected/157e167d-c691-4baa-acbc-0fc5810b92da-kube-api-access-wbjfl\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.520821 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-config-data\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.520850 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-combined-ca-bundle\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.520930 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-internal-tls-certs\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.542312 4916 scope.go:117] "RemoveContainer" containerID="7c8b966ed1ce47c29f916a17ba2d951f0a003395da8c4ffc0de4dfbeb10ce984"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.573056 4916 scope.go:117] "RemoveContainer" containerID="a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59"
Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.573435 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59\": container with ID starting with a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59 not found: ID does not exist" containerID="a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.573464 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59"} err="failed to get container status \"a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59\": rpc error: code = NotFound desc = could not find container \"a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59\": container with ID starting with a4621b9c278995f33277566454270cdaad252d73773cac3e2539d06b56fdbf59 not found: ID does not exist"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.573485 4916 scope.go:117] "RemoveContainer" containerID="3147426ea55eb3babbdd4f5f9a164e929c8e83b4de2139fb013abaf794c3aa39"
Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.573817 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3147426ea55eb3babbdd4f5f9a164e929c8e83b4de2139fb013abaf794c3aa39\": container with ID starting with 3147426ea55eb3babbdd4f5f9a164e929c8e83b4de2139fb013abaf794c3aa39 not found: ID does not exist" containerID="3147426ea55eb3babbdd4f5f9a164e929c8e83b4de2139fb013abaf794c3aa39"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.573846 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3147426ea55eb3babbdd4f5f9a164e929c8e83b4de2139fb013abaf794c3aa39"} err="failed to get container status \"3147426ea55eb3babbdd4f5f9a164e929c8e83b4de2139fb013abaf794c3aa39\": rpc error: code = NotFound desc = could not find container \"3147426ea55eb3babbdd4f5f9a164e929c8e83b4de2139fb013abaf794c3aa39\": container with ID starting with 3147426ea55eb3babbdd4f5f9a164e929c8e83b4de2139fb013abaf794c3aa39 not found: ID does not exist"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.573861 4916 scope.go:117] "RemoveContainer" containerID="7c8b966ed1ce47c29f916a17ba2d951f0a003395da8c4ffc0de4dfbeb10ce984"
Dec 03 20:25:50 crc kubenswrapper[4916]: E1203 20:25:50.574080 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c8b966ed1ce47c29f916a17ba2d951f0a003395da8c4ffc0de4dfbeb10ce984\": container with ID starting with 7c8b966ed1ce47c29f916a17ba2d951f0a003395da8c4ffc0de4dfbeb10ce984 not found: ID does not exist" containerID="7c8b966ed1ce47c29f916a17ba2d951f0a003395da8c4ffc0de4dfbeb10ce984"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.574099 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c8b966ed1ce47c29f916a17ba2d951f0a003395da8c4ffc0de4dfbeb10ce984"} err="failed to get container status \"7c8b966ed1ce47c29f916a17ba2d951f0a003395da8c4ffc0de4dfbeb10ce984\": rpc error: code = NotFound desc = could not find container \"7c8b966ed1ce47c29f916a17ba2d951f0a003395da8c4ffc0de4dfbeb10ce984\": container with ID starting with 7c8b966ed1ce47c29f916a17ba2d951f0a003395da8c4ffc0de4dfbeb10ce984 not found: ID does not exist"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.622693 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-internal-tls-certs\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.622768 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-scripts\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.622831 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-public-tls-certs\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.622855 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbjfl\" (UniqueName: \"kubernetes.io/projected/157e167d-c691-4baa-acbc-0fc5810b92da-kube-api-access-wbjfl\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.622890 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-config-data\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.622917 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-combined-ca-bundle\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.627941 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-internal-tls-certs\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.628612 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-combined-ca-bundle\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.628772 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-config-data\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.642122 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-scripts\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.642602 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-public-tls-certs\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.678516 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbjfl\" (UniqueName: \"kubernetes.io/projected/157e167d-c691-4baa-acbc-0fc5810b92da-kube-api-access-wbjfl\") pod \"aodh-0\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " pod="openstack/aodh-0"
Dec 03 20:25:50 crc kubenswrapper[4916]: I1203 20:25:50.770129 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0"
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.105649 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.147283 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gl7d2\" (UniqueName: \"kubernetes.io/projected/30e5172e-f0a1-41d0-8c11-a0565cc93597-kube-api-access-gl7d2\") pod \"30e5172e-f0a1-41d0-8c11-a0565cc93597\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") "
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.147384 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-utilities\") pod \"30e5172e-f0a1-41d0-8c11-a0565cc93597\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") "
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.147441 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-catalog-content\") pod \"30e5172e-f0a1-41d0-8c11-a0565cc93597\" (UID: \"30e5172e-f0a1-41d0-8c11-a0565cc93597\") "
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.152527 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-utilities" (OuterVolumeSpecName: "utilities") pod "30e5172e-f0a1-41d0-8c11-a0565cc93597" (UID: "30e5172e-f0a1-41d0-8c11-a0565cc93597"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.157757 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30e5172e-f0a1-41d0-8c11-a0565cc93597-kube-api-access-gl7d2" (OuterVolumeSpecName: "kube-api-access-gl7d2") pod "30e5172e-f0a1-41d0-8c11-a0565cc93597" (UID: "30e5172e-f0a1-41d0-8c11-a0565cc93597"). InnerVolumeSpecName "kube-api-access-gl7d2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.190844 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "30e5172e-f0a1-41d0-8c11-a0565cc93597" (UID: "30e5172e-f0a1-41d0-8c11-a0565cc93597"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.230962 4916 generic.go:334] "Generic (PLEG): container finished" podID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerID="ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368" exitCode=0
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.231442 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk9fl"
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.231662 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk9fl" event={"ID":"30e5172e-f0a1-41d0-8c11-a0565cc93597","Type":"ContainerDied","Data":"ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368"}
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.231731 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk9fl" event={"ID":"30e5172e-f0a1-41d0-8c11-a0565cc93597","Type":"ContainerDied","Data":"1b87755189ac228bbc468a1ee39fe8fd29d293dd43801c4b3641427580a3a30a"}
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.231752 4916 scope.go:117] "RemoveContainer" containerID="ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368"
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.253744 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gl7d2\" (UniqueName: \"kubernetes.io/projected/30e5172e-f0a1-41d0-8c11-a0565cc93597-kube-api-access-gl7d2\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.253774 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.253784 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/30e5172e-f0a1-41d0-8c11-a0565cc93597-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.278673 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"]
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.278844 4916 scope.go:117] "RemoveContainer" containerID="543bc9d422f4f647ccf5621f9d4abc9720e4a6ef61a9de728f62ac59938e8b57"
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.293866 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.305773 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk9fl"]
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.316468 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk9fl"]
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.317729 4916 scope.go:117] "RemoveContainer" containerID="a57bd223ccd90fff41ed78408bb8918aaca89a6245f5ed5b1f538b1ee3a05ab4"
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.347954 4916 scope.go:117] "RemoveContainer" containerID="ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368"
Dec 03 20:25:51 crc kubenswrapper[4916]: E1203 20:25:51.348422 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368\": container with ID starting with ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368 not found: ID does not exist" containerID="ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368"
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.348451 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368"} err="failed to get container status \"ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368\": rpc error: code = NotFound desc = could not find container \"ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368\": container with ID starting with ea65da7014425f37f348884409c5f81ceeee69754c5d0036669631f65747f368 not found: ID does not exist"
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.348472 4916 scope.go:117] "RemoveContainer" containerID="543bc9d422f4f647ccf5621f9d4abc9720e4a6ef61a9de728f62ac59938e8b57"
Dec 03 20:25:51 crc kubenswrapper[4916]: E1203 20:25:51.348746 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"543bc9d422f4f647ccf5621f9d4abc9720e4a6ef61a9de728f62ac59938e8b57\": container with ID starting with 543bc9d422f4f647ccf5621f9d4abc9720e4a6ef61a9de728f62ac59938e8b57 not found: ID does not exist" containerID="543bc9d422f4f647ccf5621f9d4abc9720e4a6ef61a9de728f62ac59938e8b57"
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.348766 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"543bc9d422f4f647ccf5621f9d4abc9720e4a6ef61a9de728f62ac59938e8b57"} err="failed to get container status \"543bc9d422f4f647ccf5621f9d4abc9720e4a6ef61a9de728f62ac59938e8b57\": rpc error: code = NotFound desc = could not find container \"543bc9d422f4f647ccf5621f9d4abc9720e4a6ef61a9de728f62ac59938e8b57\": container with ID starting with 543bc9d422f4f647ccf5621f9d4abc9720e4a6ef61a9de728f62ac59938e8b57 not found: ID does not exist"
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.348779 4916 scope.go:117] "RemoveContainer" containerID="a57bd223ccd90fff41ed78408bb8918aaca89a6245f5ed5b1f538b1ee3a05ab4"
Dec 03 20:25:51 crc kubenswrapper[4916]: E1203 20:25:51.349516 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a57bd223ccd90fff41ed78408bb8918aaca89a6245f5ed5b1f538b1ee3a05ab4\": container with ID starting with a57bd223ccd90fff41ed78408bb8918aaca89a6245f5ed5b1f538b1ee3a05ab4 not found: ID does not exist" containerID="a57bd223ccd90fff41ed78408bb8918aaca89a6245f5ed5b1f538b1ee3a05ab4"
Dec 03 20:25:51 crc kubenswrapper[4916]: I1203 20:25:51.349538 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a57bd223ccd90fff41ed78408bb8918aaca89a6245f5ed5b1f538b1ee3a05ab4"} err="failed to get container status \"a57bd223ccd90fff41ed78408bb8918aaca89a6245f5ed5b1f538b1ee3a05ab4\": rpc error: code = NotFound desc = could not find container \"a57bd223ccd90fff41ed78408bb8918aaca89a6245f5ed5b1f538b1ee3a05ab4\": container with ID starting with a57bd223ccd90fff41ed78408bb8918aaca89a6245f5ed5b1f538b1ee3a05ab4 not found: ID does not exist"
Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.260301 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"157e167d-c691-4baa-acbc-0fc5810b92da","Type":"ContainerStarted","Data":"6557c0b3369971082cf6e55fddf8503134becc41b334923502b2087e61394f93"}
Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.377741 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
podUID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerName="extract-content" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.378152 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerName="extract-content" Dec 03 20:25:52 crc kubenswrapper[4916]: E1203 20:25:52.378165 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerName="extract-utilities" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.378172 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerName="extract-utilities" Dec 03 20:25:52 crc kubenswrapper[4916]: E1203 20:25:52.378183 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerName="registry-server" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.378188 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerName="registry-server" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.378346 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="30e5172e-f0a1-41d0-8c11-a0565cc93597" containerName="registry-server" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.379759 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.381923 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.382113 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.382339 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-tst9r" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.382485 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.386871 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.395930 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.480367 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/aa884253-05a5-47e4-a258-d95aab45bb36-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.480465 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/aa884253-05a5-47e4-a258-d95aab45bb36-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.480519 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8trv\" 
(UniqueName: \"kubernetes.io/projected/aa884253-05a5-47e4-a258-d95aab45bb36-kube-api-access-j8trv\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.480551 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/aa884253-05a5-47e4-a258-d95aab45bb36-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.480588 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/aa884253-05a5-47e4-a258-d95aab45bb36-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.480621 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/aa884253-05a5-47e4-a258-d95aab45bb36-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.480650 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/aa884253-05a5-47e4-a258-d95aab45bb36-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.493693 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30e5172e-f0a1-41d0-8c11-a0565cc93597" path="/var/lib/kubelet/pods/30e5172e-f0a1-41d0-8c11-a0565cc93597/volumes" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.582448 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/aa884253-05a5-47e4-a258-d95aab45bb36-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.583503 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/aa884253-05a5-47e4-a258-d95aab45bb36-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.584452 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8trv\" (UniqueName: \"kubernetes.io/projected/aa884253-05a5-47e4-a258-d95aab45bb36-kube-api-access-j8trv\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.584513 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: 
\"kubernetes.io/empty-dir/aa884253-05a5-47e4-a258-d95aab45bb36-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.584532 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/aa884253-05a5-47e4-a258-d95aab45bb36-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.584597 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/aa884253-05a5-47e4-a258-d95aab45bb36-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.584906 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/aa884253-05a5-47e4-a258-d95aab45bb36-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.584958 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/aa884253-05a5-47e4-a258-d95aab45bb36-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.589126 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/aa884253-05a5-47e4-a258-d95aab45bb36-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.591049 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/aa884253-05a5-47e4-a258-d95aab45bb36-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.593205 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/aa884253-05a5-47e4-a258-d95aab45bb36-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.593834 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/aa884253-05a5-47e4-a258-d95aab45bb36-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.603823 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: 
\"kubernetes.io/projected/aa884253-05a5-47e4-a258-d95aab45bb36-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.604652 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8trv\" (UniqueName: \"kubernetes.io/projected/aa884253-05a5-47e4-a258-d95aab45bb36-kube-api-access-j8trv\") pod \"alertmanager-metric-storage-0\" (UID: \"aa884253-05a5-47e4-a258-d95aab45bb36\") " pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:52 crc kubenswrapper[4916]: I1203 20:25:52.704025 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.711621 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.722304 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.734092 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.734680 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.734698 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.734805 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.735628 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-fjxqs" Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.745419 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.809866 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqr9m\" (UniqueName: \"kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-kube-api-access-xqr9m\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.809923 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/3e9df869-d7b1-401b-b5be-6a174893ce12-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.809964 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.809982 4916 
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.809982 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-config\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.809998 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.810032 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.810055 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/3e9df869-d7b1-401b-b5be-6a174893ce12-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.810272 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.913101 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/3e9df869-d7b1-401b-b5be-6a174893ce12-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.913197 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.913271 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqr9m\" (UniqueName: \"kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-kube-api-access-xqr9m\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.913315 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/3e9df869-d7b1-401b-b5be-6a174893ce12-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.913358 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.913377 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-config\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.913396 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.913441 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.913890 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.913928 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/3e9df869-d7b1-401b-b5be-6a174893ce12-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.923408 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.923473 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-config\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.923526 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.931483 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqr9m\" (UniqueName: \"kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-kube-api-access-xqr9m\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.931998 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/3e9df869-d7b1-401b-b5be-6a174893ce12-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.932235 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.952275 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:53.961967 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"prometheus-metric-storage-0\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:54.051499 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:54.285279 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"157e167d-c691-4baa-acbc-0fc5810b92da","Type":"ContainerStarted","Data":"380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f"}
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:54.724533 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Dec 03 20:25:54 crc kubenswrapper[4916]: W1203 20:25:54.731058 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa884253_05a5_47e4_a258_d95aab45bb36.slice/crio-cc9af52f7518845ea58f80ebcc482aa1bd05cc563000cd103bc92dda7ada1082 WatchSource:0}: Error finding container cc9af52f7518845ea58f80ebcc482aa1bd05cc563000cd103bc92dda7ada1082: Status 404 returned error can't find the container with id cc9af52f7518845ea58f80ebcc482aa1bd05cc563000cd103bc92dda7ada1082
Dec 03 20:25:54 crc kubenswrapper[4916]: I1203 20:25:54.742162 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 20:25:55 crc kubenswrapper[4916]: I1203 20:25:55.295615 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3e9df869-d7b1-401b-b5be-6a174893ce12","Type":"ContainerStarted","Data":"6640ecafed773d97a8d0ceba6fcdc540c856c32e06edab56f35dc4f1d54443eb"}
Dec 03 20:25:55 crc kubenswrapper[4916]: I1203 20:25:55.297780 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"157e167d-c691-4baa-acbc-0fc5810b92da","Type":"ContainerStarted","Data":"4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9"}
Dec 03 20:25:55 crc kubenswrapper[4916]: I1203 20:25:55.297821 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"157e167d-c691-4baa-acbc-0fc5810b92da","Type":"ContainerStarted","Data":"28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6"}
Dec 03 20:25:55 crc kubenswrapper[4916]: I1203 20:25:55.298743 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"aa884253-05a5-47e4-a258-d95aab45bb36","Type":"ContainerStarted","Data":"cc9af52f7518845ea58f80ebcc482aa1bd05cc563000cd103bc92dda7ada1082"}
Dec 03 20:25:55 crc kubenswrapper[4916]: I1203 20:25:55.663258 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x5ms5"
Dec 03 20:25:55 crc kubenswrapper[4916]: I1203 20:25:55.663318 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x5ms5"
Dec 03 20:25:55 crc kubenswrapper[4916]: I1203 20:25:55.744510 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x5ms5"
Dec 03 20:25:56 crc kubenswrapper[4916]: I1203 20:25:56.313644 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"157e167d-c691-4baa-acbc-0fc5810b92da","Type":"ContainerStarted","Data":"acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f"}
Dec 03 20:25:56 crc kubenswrapper[4916]: I1203 20:25:56.332551 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=1.881453351 podStartE2EDuration="6.33253571s" podCreationTimestamp="2025-12-03 20:25:50 +0000 UTC" firstStartedPulling="2025-12-03 20:25:51.293612911 +0000 UTC m=+3367.256423177" lastFinishedPulling="2025-12-03 20:25:55.74469523 +0000 UTC m=+3371.707505536" observedRunningTime="2025-12-03 20:25:56.331630026 +0000 UTC m=+3372.294440292" watchObservedRunningTime="2025-12-03 20:25:56.33253571 +0000 UTC m=+3372.295345976"
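
[Editor's note] The startup-latency entry above carries its own arithmetic: podStartE2EDuration is the time from podCreationTimestamp to the observed running time, and podStartSLOduration is that value minus the image-pull window (lastFinishedPulling minus firstStartedPulling). A quick check in Go against the monotonic offsets (the m=+... values) from the aodh-0 entry; the program and its names are illustrative, not kubelet code:

package main

import "fmt"

func main() {
	// Monotonic clock offsets (seconds) copied from the aodh-0 entry above.
	const (
		firstStartedPulling = 3367.256423177
		lastFinishedPulling = 3371.707505536
		e2e                 = 6.33253571 // podStartE2EDuration
	)
	pull := lastFinishedPulling - firstStartedPulling
	slo := e2e - pull
	fmt.Printf("pull=%.9fs slo=%.9fs\n", pull, slo)
	// Prints slo=1.881453351s, matching podStartSLOduration in the log.
}
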
Dec 03 20:25:56 crc kubenswrapper[4916]: I1203 20:25:56.443454 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x5ms5"
Dec 03 20:25:57 crc kubenswrapper[4916]: I1203 20:25:57.091954 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x5ms5"]
Dec 03 20:25:58 crc kubenswrapper[4916]: I1203 20:25:58.341454 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x5ms5" podUID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" containerName="registry-server" containerID="cri-o://6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04" gracePeriod=2
Dec 03 20:25:58 crc kubenswrapper[4916]: I1203 20:25:58.977263 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x5ms5"
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.043157 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxp55\" (UniqueName: \"kubernetes.io/projected/a96efff5-5bf8-46f6-a64c-c6db9c40e654-kube-api-access-vxp55\") pod \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") "
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.043290 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-catalog-content\") pod \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") "
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.043403 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-utilities\") pod \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\" (UID: \"a96efff5-5bf8-46f6-a64c-c6db9c40e654\") "
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.045515 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-utilities" (OuterVolumeSpecName: "utilities") pod "a96efff5-5bf8-46f6-a64c-c6db9c40e654" (UID: "a96efff5-5bf8-46f6-a64c-c6db9c40e654"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.050538 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a96efff5-5bf8-46f6-a64c-c6db9c40e654-kube-api-access-vxp55" (OuterVolumeSpecName: "kube-api-access-vxp55") pod "a96efff5-5bf8-46f6-a64c-c6db9c40e654" (UID: "a96efff5-5bf8-46f6-a64c-c6db9c40e654"). InnerVolumeSpecName "kube-api-access-vxp55". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.117588 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a96efff5-5bf8-46f6-a64c-c6db9c40e654" (UID: "a96efff5-5bf8-46f6-a64c-c6db9c40e654"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.145665 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.145696 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxp55\" (UniqueName: \"kubernetes.io/projected/a96efff5-5bf8-46f6-a64c-c6db9c40e654-kube-api-access-vxp55\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.145726 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a96efff5-5bf8-46f6-a64c-c6db9c40e654-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.356094 4916 generic.go:334] "Generic (PLEG): container finished" podID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" containerID="6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04" exitCode=0
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.356142 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ms5" event={"ID":"a96efff5-5bf8-46f6-a64c-c6db9c40e654","Type":"ContainerDied","Data":"6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04"}
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.356169 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x5ms5" event={"ID":"a96efff5-5bf8-46f6-a64c-c6db9c40e654","Type":"ContainerDied","Data":"0cb97d362000bdf90aa9fa96b46e168d459b07d73985148515dd28350d361355"}
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.356186 4916 scope.go:117] "RemoveContainer" containerID="6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04"
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.356286 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x5ms5"
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.393302 4916 scope.go:117] "RemoveContainer" containerID="f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db"
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.394666 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x5ms5"]
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.404884 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x5ms5"]
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.517662 4916 scope.go:117] "RemoveContainer" containerID="35bb55ada4041c51b529622c1e028ee5b75650859b8af3ab4f4addb5409a7427"
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.811886 4916 scope.go:117] "RemoveContainer" containerID="6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04"
Dec 03 20:25:59 crc kubenswrapper[4916]: E1203 20:25:59.814182 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04\": container with ID starting with 6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04 not found: ID does not exist" containerID="6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04"
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.814225 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04"} err="failed to get container status \"6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04\": rpc error: code = NotFound desc = could not find container \"6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04\": container with ID starting with 6b440be589eb86866255fb343151afd299d454b757958ecf6a272e2b42adcd04 not found: ID does not exist"
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.814250 4916 scope.go:117] "RemoveContainer" containerID="f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db"
Dec 03 20:25:59 crc kubenswrapper[4916]: E1203 20:25:59.814710 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db\": container with ID starting with f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db not found: ID does not exist" containerID="f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db"
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.814737 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db"} err="failed to get container status \"f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db\": rpc error: code = NotFound desc = could not find container \"f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db\": container with ID starting with f8341531b60f282a2601f94b8a1d558cf434dc20a0a48a167b7dd420816b57db not found: ID does not exist"
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.814754 4916 scope.go:117] "RemoveContainer" containerID="35bb55ada4041c51b529622c1e028ee5b75650859b8af3ab4f4addb5409a7427"
Dec 03 20:25:59 crc kubenswrapper[4916]: E1203 20:25:59.815000 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35bb55ada4041c51b529622c1e028ee5b75650859b8af3ab4f4addb5409a7427\": container with ID starting with 35bb55ada4041c51b529622c1e028ee5b75650859b8af3ab4f4addb5409a7427 not found: ID does not exist" containerID="35bb55ada4041c51b529622c1e028ee5b75650859b8af3ab4f4addb5409a7427"
Dec 03 20:25:59 crc kubenswrapper[4916]: I1203 20:25:59.815027 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35bb55ada4041c51b529622c1e028ee5b75650859b8af3ab4f4addb5409a7427"} err="failed to get container status \"35bb55ada4041c51b529622c1e028ee5b75650859b8af3ab4f4addb5409a7427\": rpc error: code = NotFound desc = could not find container \"35bb55ada4041c51b529622c1e028ee5b75650859b8af3ab4f4addb5409a7427\": container with ID starting with 35bb55ada4041c51b529622c1e028ee5b75650859b8af3ab4f4addb5409a7427 not found: ID does not exist"
Dec 03 20:26:00 crc kubenswrapper[4916]: I1203 20:26:00.500925 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" path="/var/lib/kubelet/pods/a96efff5-5bf8-46f6-a64c-c6db9c40e654/volumes"
Dec 03 20:26:02 crc kubenswrapper[4916]: I1203 20:26:02.404254 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3e9df869-d7b1-401b-b5be-6a174893ce12","Type":"ContainerStarted","Data":"ef22b81fe4fb905f3fa6472061202fd963b78029efdf267d829025640a21e6fd"}
Dec 03 20:26:02 crc kubenswrapper[4916]: I1203 20:26:02.406712 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"aa884253-05a5-47e4-a258-d95aab45bb36","Type":"ContainerStarted","Data":"131dc99d5f10eb807a4194a01c31baafca57350c641495945e1d81168e371213"}
Dec 03 20:26:11 crc kubenswrapper[4916]: I1203 20:26:11.540324 4916 generic.go:334] "Generic (PLEG): container finished" podID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerID="ef22b81fe4fb905f3fa6472061202fd963b78029efdf267d829025640a21e6fd" exitCode=0
Dec 03 20:26:11 crc kubenswrapper[4916]: I1203 20:26:11.540451 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3e9df869-d7b1-401b-b5be-6a174893ce12","Type":"ContainerDied","Data":"ef22b81fe4fb905f3fa6472061202fd963b78029efdf267d829025640a21e6fd"}
Dec 03 20:26:12 crc kubenswrapper[4916]: I1203 20:26:12.561877 4916 generic.go:334] "Generic (PLEG): container finished" podID="aa884253-05a5-47e4-a258-d95aab45bb36" containerID="131dc99d5f10eb807a4194a01c31baafca57350c641495945e1d81168e371213" exitCode=0
Dec 03 20:26:12 crc kubenswrapper[4916]: I1203 20:26:12.561939 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"aa884253-05a5-47e4-a258-d95aab45bb36","Type":"ContainerDied","Data":"131dc99d5f10eb807a4194a01c31baafca57350c641495945e1d81168e371213"}
Dec 03 20:26:15 crc kubenswrapper[4916]: I1203 20:26:15.626807 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"aa884253-05a5-47e4-a258-d95aab45bb36","Type":"ContainerStarted","Data":"f4b66fc62582a8bccf31a9b7a13f0ee965bd93f8ad94d2416301a33f0ee285d1"}
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:26:16 crc kubenswrapper[4916]: I1203 20:26:16.159535 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:26:16 crc kubenswrapper[4916]: I1203 20:26:16.159682 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 20:26:16 crc kubenswrapper[4916]: I1203 20:26:16.160610 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ef7f0108f788da434d859e9f496d32e519da7456f6633c29dee92073a20401b4"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 20:26:16 crc kubenswrapper[4916]: I1203 20:26:16.160741 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://ef7f0108f788da434d859e9f496d32e519da7456f6633c29dee92073a20401b4" gracePeriod=600 Dec 03 20:26:16 crc kubenswrapper[4916]: I1203 20:26:16.638077 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="ef7f0108f788da434d859e9f496d32e519da7456f6633c29dee92073a20401b4" exitCode=0 Dec 03 20:26:16 crc kubenswrapper[4916]: I1203 20:26:16.638121 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"ef7f0108f788da434d859e9f496d32e519da7456f6633c29dee92073a20401b4"} Dec 03 20:26:16 crc kubenswrapper[4916]: I1203 20:26:16.638159 4916 scope.go:117] "RemoveContainer" containerID="8b3c41c4a0d4c5cfc6c0c5bef5a1b1eb0d02db023fb771c33847f0e51f7ae771" Dec 03 20:26:18 crc kubenswrapper[4916]: I1203 20:26:18.677157 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"aa884253-05a5-47e4-a258-d95aab45bb36","Type":"ContainerStarted","Data":"c95de5af08cc6849e22dd3b92eae70ed3aaeadeb13c6f7f7a4c4e99dd58231ae"} Dec 03 20:26:18 crc kubenswrapper[4916]: I1203 20:26:18.679453 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Dec 03 20:26:18 crc kubenswrapper[4916]: I1203 20:26:18.681176 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"} Dec 03 20:26:18 crc kubenswrapper[4916]: I1203 20:26:18.683938 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Dec 03 20:26:18 crc kubenswrapper[4916]: I1203 20:26:18.684366 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"3e9df869-d7b1-401b-b5be-6a174893ce12","Type":"ContainerStarted","Data":"b121673574e00b9ccf9cb4ba6dfeb7224eeada9aacf60fa1cd440f3d9b83064f"} Dec 03 20:26:18 crc kubenswrapper[4916]: I1203 20:26:18.716468 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=6.912193876 podStartE2EDuration="26.71644859s" podCreationTimestamp="2025-12-03 20:25:52 +0000 UTC" firstStartedPulling="2025-12-03 20:25:54.732551017 +0000 UTC m=+3370.695361283" lastFinishedPulling="2025-12-03 20:26:14.536805731 +0000 UTC m=+3390.499615997" observedRunningTime="2025-12-03 20:26:18.713650806 +0000 UTC m=+3394.676461152" watchObservedRunningTime="2025-12-03 20:26:18.71644859 +0000 UTC m=+3394.679258856" Dec 03 20:26:25 crc kubenswrapper[4916]: I1203 20:26:25.773313 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3e9df869-d7b1-401b-b5be-6a174893ce12","Type":"ContainerStarted","Data":"37305a55e77abda438f26d1f0f4e3c805298a81f4e95a30440b008f273ceae9e"} Dec 03 20:26:28 crc kubenswrapper[4916]: I1203 20:26:28.816212 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3e9df869-d7b1-401b-b5be-6a174893ce12","Type":"ContainerStarted","Data":"c1b2dbcdedade80711ba176f059c7154782d467b287346764bd49a1aba58a9ba"} Dec 03 20:26:29 crc kubenswrapper[4916]: I1203 20:26:29.052645 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:39 crc kubenswrapper[4916]: I1203 20:26:39.052679 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:39 crc kubenswrapper[4916]: I1203 20:26:39.056011 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:39 crc kubenswrapper[4916]: I1203 20:26:39.108486 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=13.641832465 podStartE2EDuration="47.108448521s" podCreationTimestamp="2025-12-03 20:25:52 +0000 UTC" firstStartedPulling="2025-12-03 20:25:54.749464044 +0000 UTC m=+3370.712274310" lastFinishedPulling="2025-12-03 20:26:28.2160801 +0000 UTC m=+3404.178890366" observedRunningTime="2025-12-03 20:26:28.855324709 +0000 UTC m=+3404.818135025" watchObservedRunningTime="2025-12-03 20:26:39.108448521 +0000 UTC m=+3415.071258817" Dec 03 20:26:39 crc kubenswrapper[4916]: I1203 20:26:39.952244 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.666466 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.666951 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="57c2516a-b9ff-4816-947e-070103fba378" containerName="openstackclient" containerID="cri-o://eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205" gracePeriod=2 Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.685062 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.746390 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 
03 20:26:41 crc kubenswrapper[4916]: E1203 20:26:41.747181 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" containerName="registry-server" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.747202 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" containerName="registry-server" Dec 03 20:26:41 crc kubenswrapper[4916]: E1203 20:26:41.747218 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" containerName="extract-content" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.747225 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" containerName="extract-content" Dec 03 20:26:41 crc kubenswrapper[4916]: E1203 20:26:41.747253 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57c2516a-b9ff-4816-947e-070103fba378" containerName="openstackclient" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.750665 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="57c2516a-b9ff-4816-947e-070103fba378" containerName="openstackclient" Dec 03 20:26:41 crc kubenswrapper[4916]: E1203 20:26:41.750725 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" containerName="extract-utilities" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.750735 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" containerName="extract-utilities" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.754404 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="a96efff5-5bf8-46f6-a64c-c6db9c40e654" containerName="registry-server" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.754435 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="57c2516a-b9ff-4816-947e-070103fba378" containerName="openstackclient" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.767769 4916 util.go:30] "No sandbox for pod can be found. 
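[annotation] The two pod_startup_latency_tracker records above expose how podStartSLOduration relates to podStartE2EDuration: the SLO figure is the end-to-end startup time minus the image-pull window. A minimal Go sketch (not kubelet source; the layout string is just Go's default time.Time format) that reproduces the alertmanager numbers:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-12-03 20:25:52 +0000 UTC")
	firstPull := parse("2025-12-03 20:25:54.732551017 +0000 UTC")
	lastPull := parse("2025-12-03 20:26:14.536805731 +0000 UTC")
	observed := parse("2025-12-03 20:26:18.71644859 +0000 UTC")

	e2e := observed.Sub(created)         // 26.71644859s (podStartE2EDuration)
	slo := e2e - lastPull.Sub(firstPull) // 6.912193876s (podStartSLOduration)
	fmt.Println(e2e, slo)
}
```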
Need to start a new one" pod="openstack/openstackclient" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.774959 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="57c2516a-b9ff-4816-947e-070103fba378" podUID="0e722523-b45d-4256-a08c-088a095f77f5" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.778608 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="74cc0868-0443-4a2e-8623-9e06a16dda25" podUID="0e722523-b45d-4256-a08c-088a095f77f5" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.787515 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.812502 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 03 20:26:41 crc kubenswrapper[4916]: E1203 20:26:41.813382 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle kube-api-access-s4sj6 openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[combined-ca-bundle kube-api-access-s4sj6 openstack-config openstack-config-secret]: context canceled" pod="openstack/openstackclient" podUID="74cc0868-0443-4a2e-8623-9e06a16dda25" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.822860 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.829622 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.831061 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.837664 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.943070 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqkb2\" (UniqueName: \"kubernetes.io/projected/0e722523-b45d-4256-a08c-088a095f77f5-kube-api-access-pqkb2\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.943394 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e722523-b45d-4256-a08c-088a095f77f5-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.943545 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0e722523-b45d-4256-a08c-088a095f77f5-openstack-config-secret\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.943704 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0e722523-b45d-4256-a08c-088a095f77f5-openstack-config\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.971696 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.971948 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-api" containerID="cri-o://380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f" gracePeriod=30 Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.972301 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-listener" containerID="cri-o://acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f" gracePeriod=30 Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.972348 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-notifier" containerID="cri-o://4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9" gracePeriod=30 Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.972381 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-evaluator" containerID="cri-o://28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6" gracePeriod=30 Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.976606 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.980972 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="74cc0868-0443-4a2e-8623-9e06a16dda25" podUID="0e722523-b45d-4256-a08c-088a095f77f5" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.988002 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 20:26:41 crc kubenswrapper[4916]: I1203 20:26:41.990770 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="74cc0868-0443-4a2e-8623-9e06a16dda25" podUID="0e722523-b45d-4256-a08c-088a095f77f5" Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.045412 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqkb2\" (UniqueName: \"kubernetes.io/projected/0e722523-b45d-4256-a08c-088a095f77f5-kube-api-access-pqkb2\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.045471 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e722523-b45d-4256-a08c-088a095f77f5-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.045555 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0e722523-b45d-4256-a08c-088a095f77f5-openstack-config-secret\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.045612 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0e722523-b45d-4256-a08c-088a095f77f5-openstack-config\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.046499 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0e722523-b45d-4256-a08c-088a095f77f5-openstack-config\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.053119 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0e722523-b45d-4256-a08c-088a095f77f5-openstack-config-secret\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.061337 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e722523-b45d-4256-a08c-088a095f77f5-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.061964 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqkb2\" (UniqueName: 
\"kubernetes.io/projected/0e722523-b45d-4256-a08c-088a095f77f5-kube-api-access-pqkb2\") pod \"openstackclient\" (UID: \"0e722523-b45d-4256-a08c-088a095f77f5\") " pod="openstack/openstackclient" Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.153251 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.488384 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74cc0868-0443-4a2e-8623-9e06a16dda25" path="/var/lib/kubelet/pods/74cc0868-0443-4a2e-8623-9e06a16dda25/volumes" Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.688835 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 03 20:26:42 crc kubenswrapper[4916]: W1203 20:26:42.697803 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e722523_b45d_4256_a08c_088a095f77f5.slice/crio-b90a2ad376fe68deaab07c6538ea9b501b41e991dfcdf89cc59e011cf1b33bc9 WatchSource:0}: Error finding container b90a2ad376fe68deaab07c6538ea9b501b41e991dfcdf89cc59e011cf1b33bc9: Status 404 returned error can't find the container with id b90a2ad376fe68deaab07c6538ea9b501b41e991dfcdf89cc59e011cf1b33bc9 Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.995002 4916 generic.go:334] "Generic (PLEG): container finished" podID="157e167d-c691-4baa-acbc-0fc5810b92da" containerID="28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6" exitCode=0 Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.995354 4916 generic.go:334] "Generic (PLEG): container finished" podID="157e167d-c691-4baa-acbc-0fc5810b92da" containerID="380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f" exitCode=0 Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.995092 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"157e167d-c691-4baa-acbc-0fc5810b92da","Type":"ContainerDied","Data":"28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6"} Dec 03 20:26:42 crc kubenswrapper[4916]: I1203 20:26:42.995453 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"157e167d-c691-4baa-acbc-0fc5810b92da","Type":"ContainerDied","Data":"380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f"} Dec 03 20:26:43 crc kubenswrapper[4916]: I1203 20:26:43.000538 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 20:26:43 crc kubenswrapper[4916]: I1203 20:26:43.001972 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"0e722523-b45d-4256-a08c-088a095f77f5","Type":"ContainerStarted","Data":"d33269b6cb36b9c4516ef081b2fa928285036cd084a627fab4c713805b8b1cb8"} Dec 03 20:26:43 crc kubenswrapper[4916]: I1203 20:26:43.002009 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"0e722523-b45d-4256-a08c-088a095f77f5","Type":"ContainerStarted","Data":"b90a2ad376fe68deaab07c6538ea9b501b41e991dfcdf89cc59e011cf1b33bc9"} Dec 03 20:26:43 crc kubenswrapper[4916]: I1203 20:26:43.025065 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="74cc0868-0443-4a2e-8623-9e06a16dda25" podUID="0e722523-b45d-4256-a08c-088a095f77f5" Dec 03 20:26:43 crc kubenswrapper[4916]: I1203 20:26:43.032082 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.032058497 podStartE2EDuration="2.032058497s" podCreationTimestamp="2025-12-03 20:26:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:26:43.022089294 +0000 UTC m=+3418.984899570" watchObservedRunningTime="2025-12-03 20:26:43.032058497 +0000 UTC m=+3418.994868773" Dec 03 20:26:43 crc kubenswrapper[4916]: I1203 20:26:43.081421 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:26:43 crc kubenswrapper[4916]: I1203 20:26:43.081964 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="prometheus" containerID="cri-o://b121673574e00b9ccf9cb4ba6dfeb7224eeada9aacf60fa1cd440f3d9b83064f" gracePeriod=600 Dec 03 20:26:43 crc kubenswrapper[4916]: I1203 20:26:43.082522 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="thanos-sidecar" containerID="cri-o://c1b2dbcdedade80711ba176f059c7154782d467b287346764bd49a1aba58a9ba" gracePeriod=600 Dec 03 20:26:43 crc kubenswrapper[4916]: I1203 20:26:43.082588 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="config-reloader" containerID="cri-o://37305a55e77abda438f26d1f0f4e3c805298a81f4e95a30440b008f273ceae9e" gracePeriod=600 Dec 03 20:26:43 crc kubenswrapper[4916]: I1203 20:26:43.976605 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.017025 4916 generic.go:334] "Generic (PLEG): container finished" podID="57c2516a-b9ff-4816-947e-070103fba378" containerID="eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205" exitCode=137 Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.017111 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.017133 4916 scope.go:117] "RemoveContainer" containerID="eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.022057 4916 generic.go:334] "Generic (PLEG): container finished" podID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerID="c1b2dbcdedade80711ba176f059c7154782d467b287346764bd49a1aba58a9ba" exitCode=0 Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.022077 4916 generic.go:334] "Generic (PLEG): container finished" podID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerID="37305a55e77abda438f26d1f0f4e3c805298a81f4e95a30440b008f273ceae9e" exitCode=0 Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.022087 4916 generic.go:334] "Generic (PLEG): container finished" podID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerID="b121673574e00b9ccf9cb4ba6dfeb7224eeada9aacf60fa1cd440f3d9b83064f" exitCode=0 Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.022120 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3e9df869-d7b1-401b-b5be-6a174893ce12","Type":"ContainerDied","Data":"c1b2dbcdedade80711ba176f059c7154782d467b287346764bd49a1aba58a9ba"} Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.022140 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3e9df869-d7b1-401b-b5be-6a174893ce12","Type":"ContainerDied","Data":"37305a55e77abda438f26d1f0f4e3c805298a81f4e95a30440b008f273ceae9e"} Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.022151 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3e9df869-d7b1-401b-b5be-6a174893ce12","Type":"ContainerDied","Data":"b121673574e00b9ccf9cb4ba6dfeb7224eeada9aacf60fa1cd440f3d9b83064f"} Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.029503 4916 generic.go:334] "Generic (PLEG): container finished" podID="157e167d-c691-4baa-acbc-0fc5810b92da" containerID="acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f" exitCode=0 Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.029576 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"157e167d-c691-4baa-acbc-0fc5810b92da","Type":"ContainerDied","Data":"acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f"} Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.036077 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.044666 4916 scope.go:117] "RemoveContainer" containerID="eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205" Dec 03 20:26:44 crc kubenswrapper[4916]: E1203 20:26:44.045900 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205\": container with ID starting with eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205 not found: ID does not exist" containerID="eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.045940 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205"} err="failed to get container status \"eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205\": rpc error: code = NotFound desc = could not find container \"eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205\": container with ID starting with eb566744b44afd03bf3cfbba35b882eaf79f1c4412b942c5da3c176428f79205 not found: ID does not exist" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.096357 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/57c2516a-b9ff-4816-947e-070103fba378-openstack-config\") pod \"57c2516a-b9ff-4816-947e-070103fba378\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.096476 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-combined-ca-bundle\") pod \"57c2516a-b9ff-4816-947e-070103fba378\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.096542 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvjw5\" (UniqueName: \"kubernetes.io/projected/57c2516a-b9ff-4816-947e-070103fba378-kube-api-access-lvjw5\") pod \"57c2516a-b9ff-4816-947e-070103fba378\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.096706 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-openstack-config-secret\") pod \"57c2516a-b9ff-4816-947e-070103fba378\" (UID: \"57c2516a-b9ff-4816-947e-070103fba378\") " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.127779 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57c2516a-b9ff-4816-947e-070103fba378-kube-api-access-lvjw5" (OuterVolumeSpecName: "kube-api-access-lvjw5") pod "57c2516a-b9ff-4816-947e-070103fba378" (UID: "57c2516a-b9ff-4816-947e-070103fba378"). InnerVolumeSpecName "kube-api-access-lvjw5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.139259 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57c2516a-b9ff-4816-947e-070103fba378" (UID: "57c2516a-b9ff-4816-947e-070103fba378"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.155121 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57c2516a-b9ff-4816-947e-070103fba378-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "57c2516a-b9ff-4816-947e-070103fba378" (UID: "57c2516a-b9ff-4816-947e-070103fba378"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.164991 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "57c2516a-b9ff-4816-947e-070103fba378" (UID: "57c2516a-b9ff-4816-947e-070103fba378"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.198334 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/3e9df869-d7b1-401b-b5be-6a174893ce12-config-out\") pod \"3e9df869-d7b1-401b-b5be-6a174893ce12\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.198440 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-thanos-prometheus-http-client-file\") pod \"3e9df869-d7b1-401b-b5be-6a174893ce12\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.198473 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-web-config\") pod \"3e9df869-d7b1-401b-b5be-6a174893ce12\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.198493 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-tls-assets\") pod \"3e9df869-d7b1-401b-b5be-6a174893ce12\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.198551 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"3e9df869-d7b1-401b-b5be-6a174893ce12\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.198584 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-config\") pod \"3e9df869-d7b1-401b-b5be-6a174893ce12\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " Dec 03 20:26:44 crc 
kubenswrapper[4916]: I1203 20:26:44.198606 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqr9m\" (UniqueName: \"kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-kube-api-access-xqr9m\") pod \"3e9df869-d7b1-401b-b5be-6a174893ce12\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.198624 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/3e9df869-d7b1-401b-b5be-6a174893ce12-prometheus-metric-storage-rulefiles-0\") pod \"3e9df869-d7b1-401b-b5be-6a174893ce12\" (UID: \"3e9df869-d7b1-401b-b5be-6a174893ce12\") " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.199181 4916 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.199198 4916 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/57c2516a-b9ff-4816-947e-070103fba378-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.199209 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57c2516a-b9ff-4816-947e-070103fba378-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.199217 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvjw5\" (UniqueName: \"kubernetes.io/projected/57c2516a-b9ff-4816-947e-070103fba378-kube-api-access-lvjw5\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.199919 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e9df869-d7b1-401b-b5be-6a174893ce12-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "3e9df869-d7b1-401b-b5be-6a174893ce12" (UID: "3e9df869-d7b1-401b-b5be-6a174893ce12"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.203305 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "3e9df869-d7b1-401b-b5be-6a174893ce12" (UID: "3e9df869-d7b1-401b-b5be-6a174893ce12"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.203319 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e9df869-d7b1-401b-b5be-6a174893ce12-config-out" (OuterVolumeSpecName: "config-out") pod "3e9df869-d7b1-401b-b5be-6a174893ce12" (UID: "3e9df869-d7b1-401b-b5be-6a174893ce12"). InnerVolumeSpecName "config-out". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.204147 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-kube-api-access-xqr9m" (OuterVolumeSpecName: "kube-api-access-xqr9m") pod "3e9df869-d7b1-401b-b5be-6a174893ce12" (UID: "3e9df869-d7b1-401b-b5be-6a174893ce12"). InnerVolumeSpecName "kube-api-access-xqr9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.207893 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "3e9df869-d7b1-401b-b5be-6a174893ce12" (UID: "3e9df869-d7b1-401b-b5be-6a174893ce12"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.208032 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "3e9df869-d7b1-401b-b5be-6a174893ce12" (UID: "3e9df869-d7b1-401b-b5be-6a174893ce12"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.218157 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-config" (OuterVolumeSpecName: "config") pod "3e9df869-d7b1-401b-b5be-6a174893ce12" (UID: "3e9df869-d7b1-401b-b5be-6a174893ce12"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.244146 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-web-config" (OuterVolumeSpecName: "web-config") pod "3e9df869-d7b1-401b-b5be-6a174893ce12" (UID: "3e9df869-d7b1-401b-b5be-6a174893ce12"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.300725 4916 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.300758 4916 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-web-config\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.300767 4916 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.300801 4916 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.300810 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/3e9df869-d7b1-401b-b5be-6a174893ce12-config\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.300819 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqr9m\" (UniqueName: \"kubernetes.io/projected/3e9df869-d7b1-401b-b5be-6a174893ce12-kube-api-access-xqr9m\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.300828 4916 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/3e9df869-d7b1-401b-b5be-6a174893ce12-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.300836 4916 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/3e9df869-d7b1-401b-b5be-6a174893ce12-config-out\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.330164 4916 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.336270 4916 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="57c2516a-b9ff-4816-947e-070103fba378" podUID="0e722523-b45d-4256-a08c-088a095f77f5" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.402647 4916 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:44 crc kubenswrapper[4916]: I1203 20:26:44.520992 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57c2516a-b9ff-4816-947e-070103fba378" path="/var/lib/kubelet/pods/57c2516a-b9ff-4816-947e-070103fba378/volumes" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.054176 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"3e9df869-d7b1-401b-b5be-6a174893ce12","Type":"ContainerDied","Data":"6640ecafed773d97a8d0ceba6fcdc540c856c32e06edab56f35dc4f1d54443eb"} Dec 03 
20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.054496 4916 scope.go:117] "RemoveContainer" containerID="c1b2dbcdedade80711ba176f059c7154782d467b287346764bd49a1aba58a9ba" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.054297 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.084754 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.099873 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.109078 4916 scope.go:117] "RemoveContainer" containerID="37305a55e77abda438f26d1f0f4e3c805298a81f4e95a30440b008f273ceae9e" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.136848 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:26:45 crc kubenswrapper[4916]: E1203 20:26:45.137485 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="init-config-reloader" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.137508 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="init-config-reloader" Dec 03 20:26:45 crc kubenswrapper[4916]: E1203 20:26:45.137538 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="config-reloader" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.137547 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="config-reloader" Dec 03 20:26:45 crc kubenswrapper[4916]: E1203 20:26:45.137588 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="prometheus" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.137597 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="prometheus" Dec 03 20:26:45 crc kubenswrapper[4916]: E1203 20:26:45.137613 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="thanos-sidecar" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.137622 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="thanos-sidecar" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.137864 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="thanos-sidecar" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.137897 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="config-reloader" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.137915 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" containerName="prometheus" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.140595 4916 util.go:30] "No sandbox for pod can be found. 
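[annotation] The cpu_manager/memory_manager records above purge per-container resource assignments keyed by the old prometheus pod UID once the pod is recreated. A hedged sketch of that cleanup shape (hypothetical types, not cpu_manager itself):

```go
package sketch

// Assignments are keyed by (podUID, containerName); entries for pods no
// longer known to the API server are dropped.
type assignments map[string]map[string]string // podUID -> container -> cpuset

func removeStaleState(a assignments, active map[string]bool) {
	for podUID, containers := range a {
		if active[podUID] {
			continue
		}
		for name := range containers {
			delete(containers, name) // "Deleted CPUSet assignment"
		}
		delete(a, podUID) // "RemoveStaleState: removing container"
	}
}
```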
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.144546 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.145000 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-fjxqs" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.146638 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.146887 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.147089 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.147335 4916 scope.go:117] "RemoveContainer" containerID="b121673574e00b9ccf9cb4ba6dfeb7224eeada9aacf60fa1cd440f3d9b83064f" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.147425 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.151521 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.153336 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.196697 4916 scope.go:117] "RemoveContainer" containerID="ef22b81fe4fb905f3fa6472061202fd963b78029efdf267d829025640a21e6fd" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.328875 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.329382 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.329782 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/edf45883-e0e2-41cb-af26-450d70626a3a-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.330130 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flbmm\" (UniqueName: \"kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-kube-api-access-flbmm\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc 
kubenswrapper[4916]: I1203 20:26:45.330391 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.330663 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-config\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.330948 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/edf45883-e0e2-41cb-af26-450d70626a3a-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.331173 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.331407 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.331698 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.331920 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.433595 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/edf45883-e0e2-41cb-af26-450d70626a3a-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.433684 4916 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flbmm\" (UniqueName: \"kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-kube-api-access-flbmm\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.433725 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.433761 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-config\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.433817 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/edf45883-e0e2-41cb-af26-450d70626a3a-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.433839 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.433864 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.433897 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.433918 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.433957 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"prometheus-metric-storage-0\" (UID: 
\"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.433992 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.434926 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/edf45883-e0e2-41cb-af26-450d70626a3a-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.435608 4916 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.440351 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-config\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.443132 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.444368 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.444936 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/edf45883-e0e2-41cb-af26-450d70626a3a-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.446037 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.446580 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-thanos-prometheus-http-client-file\") pod 
\"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.446636 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.449631 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.456112 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flbmm\" (UniqueName: \"kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-kube-api-access-flbmm\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.486155 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"prometheus-metric-storage-0\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:45 crc kubenswrapper[4916]: I1203 20:26:45.771076 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:26:46 crc kubenswrapper[4916]: I1203 20:26:46.296977 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:26:46 crc kubenswrapper[4916]: I1203 20:26:46.490250 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e9df869-d7b1-401b-b5be-6a174893ce12" path="/var/lib/kubelet/pods/3e9df869-d7b1-401b-b5be-6a174893ce12/volumes" Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.082438 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"edf45883-e0e2-41cb-af26-450d70626a3a","Type":"ContainerStarted","Data":"adcc3dce1fa0a1abefbf5d61aea8a77ed3c70ebaf3e7c059f77c53790ce6eaf9"} Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.792756 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.885438 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbjfl\" (UniqueName: \"kubernetes.io/projected/157e167d-c691-4baa-acbc-0fc5810b92da-kube-api-access-wbjfl\") pod \"157e167d-c691-4baa-acbc-0fc5810b92da\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.885529 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-internal-tls-certs\") pod \"157e167d-c691-4baa-acbc-0fc5810b92da\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.885593 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-combined-ca-bundle\") pod \"157e167d-c691-4baa-acbc-0fc5810b92da\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.885678 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-public-tls-certs\") pod \"157e167d-c691-4baa-acbc-0fc5810b92da\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.885719 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-scripts\") pod \"157e167d-c691-4baa-acbc-0fc5810b92da\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.885779 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-config-data\") pod \"157e167d-c691-4baa-acbc-0fc5810b92da\" (UID: \"157e167d-c691-4baa-acbc-0fc5810b92da\") " Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.895582 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/157e167d-c691-4baa-acbc-0fc5810b92da-kube-api-access-wbjfl" (OuterVolumeSpecName: "kube-api-access-wbjfl") pod "157e167d-c691-4baa-acbc-0fc5810b92da" (UID: "157e167d-c691-4baa-acbc-0fc5810b92da"). InnerVolumeSpecName "kube-api-access-wbjfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.928473 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-scripts" (OuterVolumeSpecName: "scripts") pod "157e167d-c691-4baa-acbc-0fc5810b92da" (UID: "157e167d-c691-4baa-acbc-0fc5810b92da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.976787 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "157e167d-c691-4baa-acbc-0fc5810b92da" (UID: "157e167d-c691-4baa-acbc-0fc5810b92da"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.988190 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbjfl\" (UniqueName: \"kubernetes.io/projected/157e167d-c691-4baa-acbc-0fc5810b92da-kube-api-access-wbjfl\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.988214 4916 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.988226 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:47 crc kubenswrapper[4916]: I1203 20:26:47.999829 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "157e167d-c691-4baa-acbc-0fc5810b92da" (UID: "157e167d-c691-4baa-acbc-0fc5810b92da"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.068359 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "157e167d-c691-4baa-acbc-0fc5810b92da" (UID: "157e167d-c691-4baa-acbc-0fc5810b92da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.069195 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-config-data" (OuterVolumeSpecName: "config-data") pod "157e167d-c691-4baa-acbc-0fc5810b92da" (UID: "157e167d-c691-4baa-acbc-0fc5810b92da"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.090096 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.090122 4916 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.090133 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/157e167d-c691-4baa-acbc-0fc5810b92da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.092036 4916 generic.go:334] "Generic (PLEG): container finished" podID="157e167d-c691-4baa-acbc-0fc5810b92da" containerID="4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9" exitCode=0 Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.092071 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"157e167d-c691-4baa-acbc-0fc5810b92da","Type":"ContainerDied","Data":"4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9"} Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.092097 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"157e167d-c691-4baa-acbc-0fc5810b92da","Type":"ContainerDied","Data":"6557c0b3369971082cf6e55fddf8503134becc41b334923502b2087e61394f93"} Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.092112 4916 scope.go:117] "RemoveContainer" containerID="acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.092110 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.120762 4916 scope.go:117] "RemoveContainer" containerID="4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.144584 4916 scope.go:117] "RemoveContainer" containerID="28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.145108 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.155214 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-0"] Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.163198 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Dec 03 20:26:48 crc kubenswrapper[4916]: E1203 20:26:48.163839 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-listener" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.163913 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-listener" Dec 03 20:26:48 crc kubenswrapper[4916]: E1203 20:26:48.163968 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-api" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.164011 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-api" Dec 03 20:26:48 crc kubenswrapper[4916]: E1203 20:26:48.164063 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-evaluator" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.164116 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-evaluator" Dec 03 20:26:48 crc kubenswrapper[4916]: E1203 20:26:48.164169 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-notifier" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.164212 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-notifier" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.164419 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-api" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.164481 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-notifier" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.164547 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-evaluator" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.164620 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" containerName="aodh-listener" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.166484 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.167215 4916 scope.go:117] "RemoveContainer" containerID="380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.168231 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-internal-svc" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.170241 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.170294 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-cqszc" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.170465 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-public-svc" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.170354 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.176442 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.195486 4916 scope.go:117] "RemoveContainer" containerID="acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f" Dec 03 20:26:48 crc kubenswrapper[4916]: E1203 20:26:48.197919 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f\": container with ID starting with acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f not found: ID does not exist" containerID="acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.197956 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f"} err="failed to get container status \"acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f\": rpc error: code = NotFound desc = could not find container \"acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f\": container with ID starting with acbaa364f5f2aa076798eefe08db06f56ba8e90ddca4cea2ecedd76b1a44373f not found: ID does not exist" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.197978 4916 scope.go:117] "RemoveContainer" containerID="4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9" Dec 03 20:26:48 crc kubenswrapper[4916]: E1203 20:26:48.199016 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9\": container with ID starting with 4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9 not found: ID does not exist" containerID="4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.199067 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9"} err="failed to get container status \"4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9\": rpc error: code = NotFound desc = could not find container \"4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9\": 
container with ID starting with 4a00a378ec8b60d7ec90fb9e40df5d6c175f575fc70f09cc408af0b475b296b9 not found: ID does not exist" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.199093 4916 scope.go:117] "RemoveContainer" containerID="28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6" Dec 03 20:26:48 crc kubenswrapper[4916]: E1203 20:26:48.199464 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6\": container with ID starting with 28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6 not found: ID does not exist" containerID="28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.199492 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6"} err="failed to get container status \"28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6\": rpc error: code = NotFound desc = could not find container \"28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6\": container with ID starting with 28fcae9324a8512f16d5f26afe27448af4ae59c6b34dac0090fc0e138c909ef6 not found: ID does not exist" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.199508 4916 scope.go:117] "RemoveContainer" containerID="380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f" Dec 03 20:26:48 crc kubenswrapper[4916]: E1203 20:26:48.199771 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f\": container with ID starting with 380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f not found: ID does not exist" containerID="380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.199796 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f"} err="failed to get container status \"380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f\": rpc error: code = NotFound desc = could not find container \"380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f\": container with ID starting with 380daa9f0026d02e1ecb67b7759377dd2fc27319369ec2f3da1b7d6dd0bf0f4f not found: ID does not exist" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.292991 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-public-tls-certs\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.293359 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgxnq\" (UniqueName: \"kubernetes.io/projected/311ca648-464f-458e-af51-1514e6ad81c3-kube-api-access-lgxnq\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.293670 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-config-data\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.293958 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-scripts\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.294133 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-internal-tls-certs\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.294310 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-combined-ca-bundle\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.395616 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-config-data\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.395715 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-scripts\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.395732 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-internal-tls-certs\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.395757 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-combined-ca-bundle\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.395801 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-public-tls-certs\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.395821 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgxnq\" (UniqueName: \"kubernetes.io/projected/311ca648-464f-458e-af51-1514e6ad81c3-kube-api-access-lgxnq\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.401597 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-internal-tls-certs\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.401607 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-scripts\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.401744 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-public-tls-certs\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.402982 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-combined-ca-bundle\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.407139 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-config-data\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.496028 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgxnq\" (UniqueName: \"kubernetes.io/projected/311ca648-464f-458e-af51-1514e6ad81c3-kube-api-access-lgxnq\") pod \"aodh-0\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " pod="openstack/aodh-0" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.497372 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="157e167d-c691-4baa-acbc-0fc5810b92da" path="/var/lib/kubelet/pods/157e167d-c691-4baa-acbc-0fc5810b92da/volumes" Dec 03 20:26:48 crc kubenswrapper[4916]: I1203 20:26:48.792505 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 03 20:26:49 crc kubenswrapper[4916]: W1203 20:26:49.493341 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod311ca648_464f_458e_af51_1514e6ad81c3.slice/crio-a9c4e0c59e73df398bb8a0a960992f53cda89519c5e3b0f33eb4533b3d64a5ae WatchSource:0}: Error finding container a9c4e0c59e73df398bb8a0a960992f53cda89519c5e3b0f33eb4533b3d64a5ae: Status 404 returned error can't find the container with id a9c4e0c59e73df398bb8a0a960992f53cda89519c5e3b0f33eb4533b3d64a5ae Dec 03 20:26:49 crc kubenswrapper[4916]: I1203 20:26:49.494401 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 03 20:26:50 crc kubenswrapper[4916]: I1203 20:26:50.150450 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"311ca648-464f-458e-af51-1514e6ad81c3","Type":"ContainerStarted","Data":"a9c4e0c59e73df398bb8a0a960992f53cda89519c5e3b0f33eb4533b3d64a5ae"} Dec 03 20:26:51 crc kubenswrapper[4916]: I1203 20:26:51.168289 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"edf45883-e0e2-41cb-af26-450d70626a3a","Type":"ContainerStarted","Data":"8d879f36e27e93976461ead9838aede6ff91ee8ef1857e6915bf38f417d24e1a"} Dec 03 20:26:51 crc kubenswrapper[4916]: I1203 20:26:51.171685 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"311ca648-464f-458e-af51-1514e6ad81c3","Type":"ContainerStarted","Data":"b1285f8e38f798ce20c783d549710f373368770048ef743fa22a7b7c34d72e31"} Dec 03 20:26:51 crc kubenswrapper[4916]: I1203 20:26:51.171771 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"311ca648-464f-458e-af51-1514e6ad81c3","Type":"ContainerStarted","Data":"165b3f7ac53b95bcb3c93c676d753c7c7005ed65857fefd2a6f89259f849a112"} Dec 03 20:26:52 crc kubenswrapper[4916]: I1203 20:26:52.181886 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"311ca648-464f-458e-af51-1514e6ad81c3","Type":"ContainerStarted","Data":"08594c77ed71a1da0642d9c40f8c43a70b2c2cbdfffabfaca5b7786fa409d618"} Dec 03 20:26:52 crc kubenswrapper[4916]: I1203 20:26:52.182416 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"311ca648-464f-458e-af51-1514e6ad81c3","Type":"ContainerStarted","Data":"22a7df9e33183f42754ef8df8a0d6940f900e3bb1c1934ea4081f2fd6713fe0e"} Dec 03 20:26:52 crc kubenswrapper[4916]: I1203 20:26:52.234668 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=1.8554206720000002 podStartE2EDuration="4.234648681s" podCreationTimestamp="2025-12-03 20:26:48 +0000 UTC" firstStartedPulling="2025-12-03 20:26:49.498506116 +0000 UTC m=+3425.461316392" lastFinishedPulling="2025-12-03 20:26:51.877734125 +0000 UTC m=+3427.840544401" observedRunningTime="2025-12-03 20:26:52.207612841 +0000 UTC m=+3428.170423107" watchObservedRunningTime="2025-12-03 20:26:52.234648681 +0000 UTC m=+3428.197458947" Dec 03 20:27:00 crc kubenswrapper[4916]: I1203 20:27:00.275500 4916 generic.go:334] "Generic (PLEG): container finished" podID="edf45883-e0e2-41cb-af26-450d70626a3a" containerID="8d879f36e27e93976461ead9838aede6ff91ee8ef1857e6915bf38f417d24e1a" exitCode=0 Dec 03 20:27:00 crc kubenswrapper[4916]: I1203 20:27:00.276281 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"edf45883-e0e2-41cb-af26-450d70626a3a","Type":"ContainerDied","Data":"8d879f36e27e93976461ead9838aede6ff91ee8ef1857e6915bf38f417d24e1a"} Dec 03 20:27:01 crc kubenswrapper[4916]: I1203 20:27:01.293186 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"edf45883-e0e2-41cb-af26-450d70626a3a","Type":"ContainerStarted","Data":"84a1354e8a8d54588b27892309e44beaefb679ceb1aba1258c165848fc5e7139"} Dec 03 20:27:05 crc kubenswrapper[4916]: I1203 20:27:05.344757 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"edf45883-e0e2-41cb-af26-450d70626a3a","Type":"ContainerStarted","Data":"652583a12ff3ff3719d822fa1a3bc49752d32829fd53a29d39a22893b74e74c4"} Dec 03 20:27:06 crc kubenswrapper[4916]: I1203 20:27:06.363895 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"edf45883-e0e2-41cb-af26-450d70626a3a","Type":"ContainerStarted","Data":"f3f882359847354099903f21f32491e0fb40aafaac22c67a4e9aa90928bea5e2"} Dec 03 20:27:06 crc kubenswrapper[4916]: I1203 20:27:06.410762 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=21.410730898 podStartE2EDuration="21.410730898s" podCreationTimestamp="2025-12-03 20:26:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:27:06.404250536 +0000 UTC m=+3442.367060872" watchObservedRunningTime="2025-12-03 20:27:06.410730898 +0000 UTC m=+3442.373541204" Dec 03 20:27:10 crc kubenswrapper[4916]: I1203 20:27:10.772247 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 03 20:27:15 crc kubenswrapper[4916]: I1203 20:27:15.772005 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 03 20:27:15 crc kubenswrapper[4916]: I1203 20:27:15.780413 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 03 20:27:16 crc kubenswrapper[4916]: I1203 20:27:16.509237 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:43 crc kubenswrapper[4916]: I1203 20:28:43.616686 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_6f4635b6-2410-4d5f-a7c9-3cf0a04739f7/manager/0.log" Dec 03 20:28:45 crc kubenswrapper[4916]: I1203 20:28:45.677208 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:28:45 crc kubenswrapper[4916]: I1203 20:28:45.678137 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="thanos-sidecar" containerID="cri-o://f3f882359847354099903f21f32491e0fb40aafaac22c67a4e9aa90928bea5e2" gracePeriod=600 Dec 03 20:28:45 crc kubenswrapper[4916]: I1203 20:28:45.678505 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="config-reloader" containerID="cri-o://652583a12ff3ff3719d822fa1a3bc49752d32829fd53a29d39a22893b74e74c4" gracePeriod=600 Dec 03 20:28:45 crc 
kubenswrapper[4916]: I1203 20:28:45.682326 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="prometheus" containerID="cri-o://84a1354e8a8d54588b27892309e44beaefb679ceb1aba1258c165848fc5e7139" gracePeriod=600 Dec 03 20:28:45 crc kubenswrapper[4916]: I1203 20:28:45.772413 4916 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="prometheus" probeResult="failure" output="Get \"https://10.217.1.13:9090/-/ready\": dial tcp 10.217.1.13:9090: connect: connection refused" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.158645 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.159124 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.580469 4916 generic.go:334] "Generic (PLEG): container finished" podID="edf45883-e0e2-41cb-af26-450d70626a3a" containerID="f3f882359847354099903f21f32491e0fb40aafaac22c67a4e9aa90928bea5e2" exitCode=0 Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.580510 4916 generic.go:334] "Generic (PLEG): container finished" podID="edf45883-e0e2-41cb-af26-450d70626a3a" containerID="652583a12ff3ff3719d822fa1a3bc49752d32829fd53a29d39a22893b74e74c4" exitCode=0 Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.580518 4916 generic.go:334] "Generic (PLEG): container finished" podID="edf45883-e0e2-41cb-af26-450d70626a3a" containerID="84a1354e8a8d54588b27892309e44beaefb679ceb1aba1258c165848fc5e7139" exitCode=0 Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.580537 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"edf45883-e0e2-41cb-af26-450d70626a3a","Type":"ContainerDied","Data":"f3f882359847354099903f21f32491e0fb40aafaac22c67a4e9aa90928bea5e2"} Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.580589 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"edf45883-e0e2-41cb-af26-450d70626a3a","Type":"ContainerDied","Data":"652583a12ff3ff3719d822fa1a3bc49752d32829fd53a29d39a22893b74e74c4"} Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.580610 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"edf45883-e0e2-41cb-af26-450d70626a3a","Type":"ContainerDied","Data":"84a1354e8a8d54588b27892309e44beaefb679ceb1aba1258c165848fc5e7139"} Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.777678 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.912902 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"edf45883-e0e2-41cb-af26-450d70626a3a\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.913002 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-tls-assets\") pod \"edf45883-e0e2-41cb-af26-450d70626a3a\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.913050 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-config\") pod \"edf45883-e0e2-41cb-af26-450d70626a3a\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.913087 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-secret-combined-ca-bundle\") pod \"edf45883-e0e2-41cb-af26-450d70626a3a\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.913119 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/edf45883-e0e2-41cb-af26-450d70626a3a-config-out\") pod \"edf45883-e0e2-41cb-af26-450d70626a3a\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.913182 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"edf45883-e0e2-41cb-af26-450d70626a3a\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.913230 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config\") pod \"edf45883-e0e2-41cb-af26-450d70626a3a\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.913264 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"edf45883-e0e2-41cb-af26-450d70626a3a\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.913330 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/edf45883-e0e2-41cb-af26-450d70626a3a-prometheus-metric-storage-rulefiles-0\") pod \"edf45883-e0e2-41cb-af26-450d70626a3a\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.913391 4916 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-thanos-prometheus-http-client-file\") pod \"edf45883-e0e2-41cb-af26-450d70626a3a\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.913433 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flbmm\" (UniqueName: \"kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-kube-api-access-flbmm\") pod \"edf45883-e0e2-41cb-af26-450d70626a3a\" (UID: \"edf45883-e0e2-41cb-af26-450d70626a3a\") " Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.915268 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/edf45883-e0e2-41cb-af26-450d70626a3a-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "edf45883-e0e2-41cb-af26-450d70626a3a" (UID: "edf45883-e0e2-41cb-af26-450d70626a3a"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.922501 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edf45883-e0e2-41cb-af26-450d70626a3a-config-out" (OuterVolumeSpecName: "config-out") pod "edf45883-e0e2-41cb-af26-450d70626a3a" (UID: "edf45883-e0e2-41cb-af26-450d70626a3a"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.922552 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "edf45883-e0e2-41cb-af26-450d70626a3a" (UID: "edf45883-e0e2-41cb-af26-450d70626a3a"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.922536 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-kube-api-access-flbmm" (OuterVolumeSpecName: "kube-api-access-flbmm") pod "edf45883-e0e2-41cb-af26-450d70626a3a" (UID: "edf45883-e0e2-41cb-af26-450d70626a3a"). InnerVolumeSpecName "kube-api-access-flbmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.923014 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "edf45883-e0e2-41cb-af26-450d70626a3a" (UID: "edf45883-e0e2-41cb-af26-450d70626a3a"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.924742 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "edf45883-e0e2-41cb-af26-450d70626a3a" (UID: "edf45883-e0e2-41cb-af26-450d70626a3a"). 
InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.924855 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "edf45883-e0e2-41cb-af26-450d70626a3a" (UID: "edf45883-e0e2-41cb-af26-450d70626a3a"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.925198 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "edf45883-e0e2-41cb-af26-450d70626a3a" (UID: "edf45883-e0e2-41cb-af26-450d70626a3a"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.927548 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "edf45883-e0e2-41cb-af26-450d70626a3a" (UID: "edf45883-e0e2-41cb-af26-450d70626a3a"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 03 20:28:46 crc kubenswrapper[4916]: I1203 20:28:46.933720 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-config" (OuterVolumeSpecName: "config") pod "edf45883-e0e2-41cb-af26-450d70626a3a" (UID: "edf45883-e0e2-41cb-af26-450d70626a3a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.012867 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config" (OuterVolumeSpecName: "web-config") pod "edf45883-e0e2-41cb-af26-450d70626a3a" (UID: "edf45883-e0e2-41cb-af26-450d70626a3a"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.016051 4916 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/edf45883-e0e2-41cb-af26-450d70626a3a-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.016086 4916 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.016103 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flbmm\" (UniqueName: \"kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-kube-api-access-flbmm\") on node \"crc\" DevicePath \"\"" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.016114 4916 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.016125 4916 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/edf45883-e0e2-41cb-af26-450d70626a3a-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.016135 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-config\") on node \"crc\" DevicePath \"\"" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.016143 4916 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.016152 4916 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/edf45883-e0e2-41cb-af26-450d70626a3a-config-out\") on node \"crc\" DevicePath \"\"" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.016160 4916 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.016170 4916 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/edf45883-e0e2-41cb-af26-450d70626a3a-web-config\") on node \"crc\" DevicePath \"\"" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.016198 4916 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.039405 4916 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.119846 4916 
reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.599430 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"edf45883-e0e2-41cb-af26-450d70626a3a","Type":"ContainerDied","Data":"adcc3dce1fa0a1abefbf5d61aea8a77ed3c70ebaf3e7c059f77c53790ce6eaf9"} Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.599524 4916 scope.go:117] "RemoveContainer" containerID="f3f882359847354099903f21f32491e0fb40aafaac22c67a4e9aa90928bea5e2" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.599834 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.645334 4916 scope.go:117] "RemoveContainer" containerID="652583a12ff3ff3719d822fa1a3bc49752d32829fd53a29d39a22893b74e74c4" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.682454 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.698465 4916 scope.go:117] "RemoveContainer" containerID="84a1354e8a8d54588b27892309e44beaefb679ceb1aba1258c165848fc5e7139" Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.699518 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:28:47 crc kubenswrapper[4916]: I1203 20:28:47.719102 4916 scope.go:117] "RemoveContainer" containerID="8d879f36e27e93976461ead9838aede6ff91ee8ef1857e6915bf38f417d24e1a" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.392082 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:28:48 crc kubenswrapper[4916]: E1203 20:28:48.392692 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="config-reloader" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.392712 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="config-reloader" Dec 03 20:28:48 crc kubenswrapper[4916]: E1203 20:28:48.392739 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="thanos-sidecar" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.392752 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="thanos-sidecar" Dec 03 20:28:48 crc kubenswrapper[4916]: E1203 20:28:48.392807 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="init-config-reloader" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.392821 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="init-config-reloader" Dec 03 20:28:48 crc kubenswrapper[4916]: E1203 20:28:48.392860 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="prometheus" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.392872 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="prometheus" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.393206 4916 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="prometheus" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.393240 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="config-reloader" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.393259 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" containerName="thanos-sidecar" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.396962 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.400609 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.400812 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-fjxqs" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.401132 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.401432 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.401784 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.406449 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.437460 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.492372 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edf45883-e0e2-41cb-af26-450d70626a3a" path="/var/lib/kubelet/pods/edf45883-e0e2-41cb-af26-450d70626a3a/volumes" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.550038 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.550096 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.550125 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: 
\"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.550146 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w872x\" (UniqueName: \"kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-kube-api-access-w872x\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.550174 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.550244 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.550279 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.550336 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-db\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.550364 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.550382 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.550404 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.652343 4916 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.652416 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.652450 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w872x\" (UniqueName: \"kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-kube-api-access-w872x\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.652488 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.652554 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.652623 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.652707 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-db\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.652747 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.652776 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " 
pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.652827 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.652859 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.653280 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-db\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.654172 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.657189 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.657469 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.657637 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.657866 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.658448 
4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.660694 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.665282 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.673595 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w872x\" (UniqueName: \"kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-kube-api-access-w872x\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.869965 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 03 20:28:48 crc kubenswrapper[4916]: I1203 20:28:48.877283 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:49 crc kubenswrapper[4916]: I1203 20:28:49.036166 4916 util.go:30] "No sandbox for pod can be found. 
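The volume entries above follow kubelet's reconciler pattern: VerifyControllerAttachedVolume (reconciler_common.go:245) records the desired state, "MountVolume started" (reconciler_common.go:218) begins converging, and "MountVolume.SetUp succeeded" (operation_generator.go:637) confirms the actual state matches. A minimal, self-contained Go sketch of that desired-vs-actual shape follows; it is illustrative only, and names like reconciler are stand-ins, not kubelet's real types.

package main

import "fmt"

// reconciler holds the desired-state ("what the pod spec wants") and
// actual-state ("what is currently mounted") views compared above.
type reconciler struct {
	desired map[string]bool
	actual  map[string]bool
}

// reconcile mounts any volume that is desired but not yet actual,
// mirroring the VerifyControllerAttachedVolume -> MountVolume ->
// MountVolume.SetUp progression in the log.
func (r *reconciler) reconcile() {
	for vol := range r.desired {
		if r.actual[vol] {
			continue // already mounted; nothing to do
		}
		fmt.Printf("MountVolume started for volume %q\n", vol)
		r.actual[vol] = true // stand-in for the real SetUp work
		fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", vol)
	}
}

func main() {
	r := &reconciler{
		desired: map[string]bool{"config": true, "web-config": true, "tls-assets": true},
		actual:  map[string]bool{},
	}
	r.reconcile()
}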
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:28:49 crc kubenswrapper[4916]: W1203 20:28:49.555810 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84ca7e63_bde0_47e3_a81b_efd7ac93b058.slice/crio-23f93fd3ed01354b0dbe996a8816f5120a63b34e2b390ef91007a8ad2cedba02 WatchSource:0}: Error finding container 23f93fd3ed01354b0dbe996a8816f5120a63b34e2b390ef91007a8ad2cedba02: Status 404 returned error can't find the container with id 23f93fd3ed01354b0dbe996a8816f5120a63b34e2b390ef91007a8ad2cedba02 Dec 03 20:28:49 crc kubenswrapper[4916]: I1203 20:28:49.557413 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:28:49 crc kubenswrapper[4916]: I1203 20:28:49.621894 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"84ca7e63-bde0-47e3-a81b-efd7ac93b058","Type":"ContainerStarted","Data":"23f93fd3ed01354b0dbe996a8816f5120a63b34e2b390ef91007a8ad2cedba02"} Dec 03 20:28:53 crc kubenswrapper[4916]: I1203 20:28:53.664635 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"84ca7e63-bde0-47e3-a81b-efd7ac93b058","Type":"ContainerStarted","Data":"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70"} Dec 03 20:29:01 crc kubenswrapper[4916]: I1203 20:29:01.750426 4916 generic.go:334] "Generic (PLEG): container finished" podID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerID="7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70" exitCode=0 Dec 03 20:29:01 crc kubenswrapper[4916]: I1203 20:29:01.750548 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"84ca7e63-bde0-47e3-a81b-efd7ac93b058","Type":"ContainerDied","Data":"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70"} Dec 03 20:29:02 crc kubenswrapper[4916]: I1203 20:29:02.765409 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"84ca7e63-bde0-47e3-a81b-efd7ac93b058","Type":"ContainerStarted","Data":"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39"} Dec 03 20:29:06 crc kubenswrapper[4916]: I1203 20:29:06.820604 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"84ca7e63-bde0-47e3-a81b-efd7ac93b058","Type":"ContainerStarted","Data":"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9"} Dec 03 20:29:06 crc kubenswrapper[4916]: I1203 20:29:06.821281 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"84ca7e63-bde0-47e3-a81b-efd7ac93b058","Type":"ContainerStarted","Data":"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528"} Dec 03 20:29:06 crc kubenswrapper[4916]: I1203 20:29:06.862168 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=18.86214271 podStartE2EDuration="18.86214271s" podCreationTimestamp="2025-12-03 20:28:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:29:06.848930038 +0000 UTC m=+3562.811740314" watchObservedRunningTime="2025-12-03 20:29:06.86214271 +0000 UTC m=+3562.824953016" Dec 03 20:29:09 crc kubenswrapper[4916]: I1203 20:29:09.037209 4916 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 03 20:29:16 crc kubenswrapper[4916]: I1203 20:29:16.158536 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:29:16 crc kubenswrapper[4916]: I1203 20:29:16.159203 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:29:19 crc kubenswrapper[4916]: I1203 20:29:19.036865 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 03 20:29:19 crc kubenswrapper[4916]: I1203 20:29:19.044511 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 03 20:29:20 crc kubenswrapper[4916]: I1203 20:29:20.241430 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 03 20:29:46 crc kubenswrapper[4916]: I1203 20:29:46.159285 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:29:46 crc kubenswrapper[4916]: I1203 20:29:46.160035 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:29:46 crc kubenswrapper[4916]: I1203 20:29:46.160101 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 20:29:46 crc kubenswrapper[4916]: I1203 20:29:46.174870 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 20:29:46 crc kubenswrapper[4916]: I1203 20:29:46.175081 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" gracePeriod=600 Dec 03 20:29:46 crc kubenswrapper[4916]: E1203 20:29:46.308361 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:29:46 crc kubenswrapper[4916]: I1203 20:29:46.606104 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" exitCode=0 Dec 03 20:29:46 crc kubenswrapper[4916]: I1203 20:29:46.606184 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"} Dec 03 20:29:46 crc kubenswrapper[4916]: I1203 20:29:46.606615 4916 scope.go:117] "RemoveContainer" containerID="ef7f0108f788da434d859e9f496d32e519da7456f6633c29dee92073a20401b4" Dec 03 20:29:46 crc kubenswrapper[4916]: I1203 20:29:46.607549 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:29:46 crc kubenswrapper[4916]: E1203 20:29:46.608087 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:29:58 crc kubenswrapper[4916]: I1203 20:29:58.478353 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:29:58 crc kubenswrapper[4916]: E1203 20:29:58.479287 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.169347 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827"] Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.171820 4916 util.go:30] "No sandbox for pod can be found. 
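The repeated "back-off 5m0s" errors above show a restart delay already at its ceiling: after each crash kubelet roughly doubles the wait before the next start attempt, clamped at a cap (a 10s base doubling to a 5m ceiling is the upstream default, assumed here). A small Go sketch of that growth curve, not kubelet's actual code:

package main

import (
	"fmt"
	"time"
)

// backoff approximates CrashLoopBackOff delay growth: double per
// restart, clamped at maxDelay. Once the cap is hit, every retry
// reports the same "back-off 5m0s", as in the entries above.
func backoff(restarts int) time.Duration {
	const base, maxDelay = 10 * time.Second, 5 * time.Minute
	d := base
	for i := 0; i < restarts; i++ {
		d *= 2
		if d > maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for r := 0; r <= 6; r++ {
		fmt.Printf("restart %d -> wait %s\n", r, backoff(r))
	}
}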
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.174699 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.176856 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.185449 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827"] Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.261738 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94phx\" (UniqueName: \"kubernetes.io/projected/1239e503-a708-483d-ac6c-30413fd44738-kube-api-access-94phx\") pod \"collect-profiles-29413230-gr827\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.262503 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1239e503-a708-483d-ac6c-30413fd44738-secret-volume\") pod \"collect-profiles-29413230-gr827\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.262941 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1239e503-a708-483d-ac6c-30413fd44738-config-volume\") pod \"collect-profiles-29413230-gr827\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.364275 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94phx\" (UniqueName: \"kubernetes.io/projected/1239e503-a708-483d-ac6c-30413fd44738-kube-api-access-94phx\") pod \"collect-profiles-29413230-gr827\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.364510 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1239e503-a708-483d-ac6c-30413fd44738-secret-volume\") pod \"collect-profiles-29413230-gr827\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.364728 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1239e503-a708-483d-ac6c-30413fd44738-config-volume\") pod \"collect-profiles-29413230-gr827\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.367818 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1239e503-a708-483d-ac6c-30413fd44738-config-volume\") pod 
\"collect-profiles-29413230-gr827\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.377518 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1239e503-a708-483d-ac6c-30413fd44738-secret-volume\") pod \"collect-profiles-29413230-gr827\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.383260 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94phx\" (UniqueName: \"kubernetes.io/projected/1239e503-a708-483d-ac6c-30413fd44738-kube-api-access-94phx\") pod \"collect-profiles-29413230-gr827\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:00 crc kubenswrapper[4916]: I1203 20:30:00.499109 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:01 crc kubenswrapper[4916]: I1203 20:30:01.031444 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827"] Dec 03 20:30:01 crc kubenswrapper[4916]: W1203 20:30:01.036442 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1239e503_a708_483d_ac6c_30413fd44738.slice/crio-3a1d44d459243a5c9383fedc0b4d61cb02acc9c9356d4c7d01cf708b12b51e05 WatchSource:0}: Error finding container 3a1d44d459243a5c9383fedc0b4d61cb02acc9c9356d4c7d01cf708b12b51e05: Status 404 returned error can't find the container with id 3a1d44d459243a5c9383fedc0b4d61cb02acc9c9356d4c7d01cf708b12b51e05 Dec 03 20:30:01 crc kubenswrapper[4916]: I1203 20:30:01.797808 4916 generic.go:334] "Generic (PLEG): container finished" podID="1239e503-a708-483d-ac6c-30413fd44738" containerID="c518b8f3e7d7c2f0206b433892071d8a662ca943be21b4b2e7854f30c3afff9d" exitCode=0 Dec 03 20:30:01 crc kubenswrapper[4916]: I1203 20:30:01.797963 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" event={"ID":"1239e503-a708-483d-ac6c-30413fd44738","Type":"ContainerDied","Data":"c518b8f3e7d7c2f0206b433892071d8a662ca943be21b4b2e7854f30c3afff9d"} Dec 03 20:30:01 crc kubenswrapper[4916]: I1203 20:30:01.798117 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" event={"ID":"1239e503-a708-483d-ac6c-30413fd44738","Type":"ContainerStarted","Data":"3a1d44d459243a5c9383fedc0b4d61cb02acc9c9356d4c7d01cf708b12b51e05"} Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.251044 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.433352 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1239e503-a708-483d-ac6c-30413fd44738-config-volume\") pod \"1239e503-a708-483d-ac6c-30413fd44738\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.433423 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1239e503-a708-483d-ac6c-30413fd44738-secret-volume\") pod \"1239e503-a708-483d-ac6c-30413fd44738\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.433509 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94phx\" (UniqueName: \"kubernetes.io/projected/1239e503-a708-483d-ac6c-30413fd44738-kube-api-access-94phx\") pod \"1239e503-a708-483d-ac6c-30413fd44738\" (UID: \"1239e503-a708-483d-ac6c-30413fd44738\") " Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.434056 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1239e503-a708-483d-ac6c-30413fd44738-config-volume" (OuterVolumeSpecName: "config-volume") pod "1239e503-a708-483d-ac6c-30413fd44738" (UID: "1239e503-a708-483d-ac6c-30413fd44738"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.439622 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1239e503-a708-483d-ac6c-30413fd44738-kube-api-access-94phx" (OuterVolumeSpecName: "kube-api-access-94phx") pod "1239e503-a708-483d-ac6c-30413fd44738" (UID: "1239e503-a708-483d-ac6c-30413fd44738"). InnerVolumeSpecName "kube-api-access-94phx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.440964 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1239e503-a708-483d-ac6c-30413fd44738-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1239e503-a708-483d-ac6c-30413fd44738" (UID: "1239e503-a708-483d-ac6c-30413fd44738"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.536254 4916 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1239e503-a708-483d-ac6c-30413fd44738-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.536308 4916 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1239e503-a708-483d-ac6c-30413fd44738-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.536329 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94phx\" (UniqueName: \"kubernetes.io/projected/1239e503-a708-483d-ac6c-30413fd44738-kube-api-access-94phx\") on node \"crc\" DevicePath \"\"" Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.825416 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" event={"ID":"1239e503-a708-483d-ac6c-30413fd44738","Type":"ContainerDied","Data":"3a1d44d459243a5c9383fedc0b4d61cb02acc9c9356d4c7d01cf708b12b51e05"} Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.825913 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a1d44d459243a5c9383fedc0b4d61cb02acc9c9356d4c7d01cf708b12b51e05" Dec 03 20:30:03 crc kubenswrapper[4916]: I1203 20:30:03.825486 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413230-gr827" Dec 03 20:30:04 crc kubenswrapper[4916]: I1203 20:30:04.354741 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969"] Dec 03 20:30:04 crc kubenswrapper[4916]: I1203 20:30:04.381028 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413185-v2969"] Dec 03 20:30:04 crc kubenswrapper[4916]: I1203 20:30:04.498305 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eea933c9-f15e-4e5e-8b14-60e7b80b32b3" path="/var/lib/kubelet/pods/eea933c9-f15e-4e5e-8b14-60e7b80b32b3/volumes" Dec 03 20:30:13 crc kubenswrapper[4916]: I1203 20:30:13.478733 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:30:13 crc kubenswrapper[4916]: E1203 20:30:13.479995 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:30:24 crc kubenswrapper[4916]: I1203 20:30:24.492790 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:30:24 crc kubenswrapper[4916]: E1203 20:30:24.494616 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:30:38 crc kubenswrapper[4916]: I1203 20:30:38.478303 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:30:38 crc kubenswrapper[4916]: E1203 20:30:38.479499 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:30:45 crc kubenswrapper[4916]: I1203 20:30:45.695680 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_6f4635b6-2410-4d5f-a7c9-3cf0a04739f7/manager/0.log" Dec 03 20:30:47 crc kubenswrapper[4916]: I1203 20:30:47.028995 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 03 20:30:47 crc kubenswrapper[4916]: I1203 20:30:47.029609 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-api" containerID="cri-o://165b3f7ac53b95bcb3c93c676d753c7c7005ed65857fefd2a6f89259f849a112" gracePeriod=30 Dec 03 20:30:47 crc kubenswrapper[4916]: I1203 20:30:47.029698 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-notifier" containerID="cri-o://22a7df9e33183f42754ef8df8a0d6940f900e3bb1c1934ea4081f2fd6713fe0e" gracePeriod=30 Dec 03 20:30:47 crc kubenswrapper[4916]: I1203 20:30:47.029721 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-evaluator" containerID="cri-o://b1285f8e38f798ce20c783d549710f373368770048ef743fa22a7b7c34d72e31" gracePeriod=30 Dec 03 20:30:47 crc kubenswrapper[4916]: I1203 20:30:47.029735 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/aodh-0" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-listener" containerID="cri-o://08594c77ed71a1da0642d9c40f8c43a70b2c2cbdfffabfaca5b7786fa409d618" gracePeriod=30 Dec 03 20:30:47 crc kubenswrapper[4916]: I1203 20:30:47.072942 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-4sqlz"] Dec 03 20:30:47 crc kubenswrapper[4916]: I1203 20:30:47.082076 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-4sqlz"] Dec 03 20:30:47 crc kubenswrapper[4916]: I1203 20:30:47.396125 4916 generic.go:334] "Generic (PLEG): container finished" podID="311ca648-464f-458e-af51-1514e6ad81c3" containerID="165b3f7ac53b95bcb3c93c676d753c7c7005ed65857fefd2a6f89259f849a112" exitCode=0 Dec 03 20:30:47 crc kubenswrapper[4916]: I1203 20:30:47.396257 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"311ca648-464f-458e-af51-1514e6ad81c3","Type":"ContainerDied","Data":"165b3f7ac53b95bcb3c93c676d753c7c7005ed65857fefd2a6f89259f849a112"} Dec 03 20:30:48 crc kubenswrapper[4916]: I1203 20:30:48.043583 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/aodh-5a36-account-create-update-mqtnh"] Dec 03 20:30:48 crc kubenswrapper[4916]: I1203 20:30:48.055406 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-5a36-account-create-update-mqtnh"] Dec 03 20:30:48 crc kubenswrapper[4916]: I1203 20:30:48.410889 4916 generic.go:334] "Generic (PLEG): container finished" podID="311ca648-464f-458e-af51-1514e6ad81c3" containerID="b1285f8e38f798ce20c783d549710f373368770048ef743fa22a7b7c34d72e31" exitCode=0 Dec 03 20:30:48 crc kubenswrapper[4916]: I1203 20:30:48.410942 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"311ca648-464f-458e-af51-1514e6ad81c3","Type":"ContainerDied","Data":"b1285f8e38f798ce20c783d549710f373368770048ef743fa22a7b7c34d72e31"} Dec 03 20:30:48 crc kubenswrapper[4916]: I1203 20:30:48.492628 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a91d358-d4b5-4314-ac74-e681d52598ca" path="/var/lib/kubelet/pods/5a91d358-d4b5-4314-ac74-e681d52598ca/volumes" Dec 03 20:30:48 crc kubenswrapper[4916]: I1203 20:30:48.493769 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb2a64b8-3927-4574-82be-abc3d0b7d92c" path="/var/lib/kubelet/pods/eb2a64b8-3927-4574-82be-abc3d0b7d92c/volumes" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.425632 4916 generic.go:334] "Generic (PLEG): container finished" podID="311ca648-464f-458e-af51-1514e6ad81c3" containerID="08594c77ed71a1da0642d9c40f8c43a70b2c2cbdfffabfaca5b7786fa409d618" exitCode=0 Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.425914 4916 generic.go:334] "Generic (PLEG): container finished" podID="311ca648-464f-458e-af51-1514e6ad81c3" containerID="22a7df9e33183f42754ef8df8a0d6940f900e3bb1c1934ea4081f2fd6713fe0e" exitCode=0 Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.425797 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"311ca648-464f-458e-af51-1514e6ad81c3","Type":"ContainerDied","Data":"08594c77ed71a1da0642d9c40f8c43a70b2c2cbdfffabfaca5b7786fa409d618"} Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.425948 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"311ca648-464f-458e-af51-1514e6ad81c3","Type":"ContainerDied","Data":"22a7df9e33183f42754ef8df8a0d6940f900e3bb1c1934ea4081f2fd6713fe0e"} Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.601311 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.689581 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-public-tls-certs\") pod \"311ca648-464f-458e-af51-1514e6ad81c3\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.689716 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-internal-tls-certs\") pod \"311ca648-464f-458e-af51-1514e6ad81c3\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.689762 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgxnq\" (UniqueName: \"kubernetes.io/projected/311ca648-464f-458e-af51-1514e6ad81c3-kube-api-access-lgxnq\") pod \"311ca648-464f-458e-af51-1514e6ad81c3\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.689783 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-config-data\") pod \"311ca648-464f-458e-af51-1514e6ad81c3\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.689811 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-scripts\") pod \"311ca648-464f-458e-af51-1514e6ad81c3\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.689848 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-combined-ca-bundle\") pod \"311ca648-464f-458e-af51-1514e6ad81c3\" (UID: \"311ca648-464f-458e-af51-1514e6ad81c3\") " Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.696863 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/311ca648-464f-458e-af51-1514e6ad81c3-kube-api-access-lgxnq" (OuterVolumeSpecName: "kube-api-access-lgxnq") pod "311ca648-464f-458e-af51-1514e6ad81c3" (UID: "311ca648-464f-458e-af51-1514e6ad81c3"). InnerVolumeSpecName "kube-api-access-lgxnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.698998 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-scripts" (OuterVolumeSpecName: "scripts") pod "311ca648-464f-458e-af51-1514e6ad81c3" (UID: "311ca648-464f-458e-af51-1514e6ad81c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.760005 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "311ca648-464f-458e-af51-1514e6ad81c3" (UID: "311ca648-464f-458e-af51-1514e6ad81c3"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.761082 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "311ca648-464f-458e-af51-1514e6ad81c3" (UID: "311ca648-464f-458e-af51-1514e6ad81c3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.792635 4916 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.792662 4916 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.792671 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgxnq\" (UniqueName: \"kubernetes.io/projected/311ca648-464f-458e-af51-1514e6ad81c3-kube-api-access-lgxnq\") on node \"crc\" DevicePath \"\"" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.792680 4916 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.834424 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-config-data" (OuterVolumeSpecName: "config-data") pod "311ca648-464f-458e-af51-1514e6ad81c3" (UID: "311ca648-464f-458e-af51-1514e6ad81c3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.840232 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "311ca648-464f-458e-af51-1514e6ad81c3" (UID: "311ca648-464f-458e-af51-1514e6ad81c3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.898399 4916 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 20:30:49 crc kubenswrapper[4916]: I1203 20:30:49.898430 4916 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/311ca648-464f-458e-af51-1514e6ad81c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.440829 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"311ca648-464f-458e-af51-1514e6ad81c3","Type":"ContainerDied","Data":"a9c4e0c59e73df398bb8a0a960992f53cda89519c5e3b0f33eb4533b3d64a5ae"} Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.440882 4916 scope.go:117] "RemoveContainer" containerID="08594c77ed71a1da0642d9c40f8c43a70b2c2cbdfffabfaca5b7786fa409d618" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.440954 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.477591 4916 scope.go:117] "RemoveContainer" containerID="22a7df9e33183f42754ef8df8a0d6940f900e3bb1c1934ea4081f2fd6713fe0e" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.492601 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-0"] Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.492656 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-0"] Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.509662 4916 scope.go:117] "RemoveContainer" containerID="b1285f8e38f798ce20c783d549710f373368770048ef743fa22a7b7c34d72e31" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.542444 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Dec 03 20:30:50 crc kubenswrapper[4916]: E1203 20:30:50.542894 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-api" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.542911 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-api" Dec 03 20:30:50 crc kubenswrapper[4916]: E1203 20:30:50.542920 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-notifier" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.542928 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-notifier" Dec 03 20:30:50 crc kubenswrapper[4916]: E1203 20:30:50.542950 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-listener" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.542957 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-listener" Dec 03 20:30:50 crc kubenswrapper[4916]: E1203 20:30:50.542983 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-evaluator" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.542989 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="311ca648-464f-458e-af51-1514e6ad81c3" 
containerName="aodh-evaluator" Dec 03 20:30:50 crc kubenswrapper[4916]: E1203 20:30:50.543005 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1239e503-a708-483d-ac6c-30413fd44738" containerName="collect-profiles" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.543010 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="1239e503-a708-483d-ac6c-30413fd44738" containerName="collect-profiles" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.543191 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="1239e503-a708-483d-ac6c-30413fd44738" containerName="collect-profiles" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.543211 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-listener" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.543231 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-notifier" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.543239 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-evaluator" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.543254 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="311ca648-464f-458e-af51-1514e6ad81c3" containerName="aodh-api" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.545145 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.548010 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.548204 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.548433 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-public-svc" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.549056 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-cqszc" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.549253 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-aodh-internal-svc" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.582853 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.593054 4916 scope.go:117] "RemoveContainer" containerID="165b3f7ac53b95bcb3c93c676d753c7c7005ed65857fefd2a6f89259f849a112" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.611759 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf8gw\" (UniqueName: \"kubernetes.io/projected/2002a42d-6cf6-42dd-99b1-df69ff30bc53-kube-api-access-zf8gw\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.611835 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-scripts\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.612389 4916 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-internal-tls-certs\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.612679 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-public-tls-certs\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.612721 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-config-data\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.612736 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-combined-ca-bundle\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.714592 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-internal-tls-certs\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.714704 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-public-tls-certs\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.714722 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-combined-ca-bundle\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.714740 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-config-data\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.714802 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf8gw\" (UniqueName: \"kubernetes.io/projected/2002a42d-6cf6-42dd-99b1-df69ff30bc53-kube-api-access-zf8gw\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.714835 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-scripts\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.719354 4916 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-public-tls-certs\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.720006 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-internal-tls-certs\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.726870 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-config-data\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.730021 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-scripts\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.732172 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf8gw\" (UniqueName: \"kubernetes.io/projected/2002a42d-6cf6-42dd-99b1-df69ff30bc53-kube-api-access-zf8gw\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.739948 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2002a42d-6cf6-42dd-99b1-df69ff30bc53-combined-ca-bundle\") pod \"aodh-0\" (UID: \"2002a42d-6cf6-42dd-99b1-df69ff30bc53\") " pod="openstack/aodh-0" Dec 03 20:30:50 crc kubenswrapper[4916]: I1203 20:30:50.871426 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0"
Dec 03 20:30:51 crc kubenswrapper[4916]: I1203 20:30:51.346078 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"]
Dec 03 20:30:51 crc kubenswrapper[4916]: I1203 20:30:51.356276 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 20:30:51 crc kubenswrapper[4916]: I1203 20:30:51.454364 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"2002a42d-6cf6-42dd-99b1-df69ff30bc53","Type":"ContainerStarted","Data":"cf9a25710c3acf7ee5241423eae22efe1a0429febb540cc378e46adfde8a5fad"}
Dec 03 20:30:52 crc kubenswrapper[4916]: I1203 20:30:52.464149 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"2002a42d-6cf6-42dd-99b1-df69ff30bc53","Type":"ContainerStarted","Data":"143151e8352d93bb87f3d3752aabbfd1a7622109b1b828387b4925c7ed73931d"}
Dec 03 20:30:52 crc kubenswrapper[4916]: I1203 20:30:52.490636 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="311ca648-464f-458e-af51-1514e6ad81c3" path="/var/lib/kubelet/pods/311ca648-464f-458e-af51-1514e6ad81c3/volumes"
Dec 03 20:30:53 crc kubenswrapper[4916]: I1203 20:30:53.477982 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"
Dec 03 20:30:53 crc kubenswrapper[4916]: I1203 20:30:53.479139 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"2002a42d-6cf6-42dd-99b1-df69ff30bc53","Type":"ContainerStarted","Data":"05ef1f3fc9053683a64fe7a0f8724321eb68f21cbbe5c26adb06628307dd6339"}
Dec 03 20:30:53 crc kubenswrapper[4916]: E1203 20:30:53.480061 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:30:54 crc kubenswrapper[4916]: I1203 20:30:54.491598 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"2002a42d-6cf6-42dd-99b1-df69ff30bc53","Type":"ContainerStarted","Data":"85a76410ba9a4e9e11ecafc9f67584738fc4a63c8e787c8581a5ff644d689c7c"}
Dec 03 20:30:54 crc kubenswrapper[4916]: I1203 20:30:54.491845 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"2002a42d-6cf6-42dd-99b1-df69ff30bc53","Type":"ContainerStarted","Data":"b97a6ffd275e539a09c03fb9836048a2dcacb93e7767b7dc6bad7e99fbd5447a"}
Dec 03 20:30:54 crc kubenswrapper[4916]: I1203 20:30:54.536455 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=1.9407862329999999 podStartE2EDuration="4.536438557s" podCreationTimestamp="2025-12-03 20:30:50 +0000 UTC" firstStartedPulling="2025-12-03 20:30:51.356058529 +0000 UTC m=+3667.318868795" lastFinishedPulling="2025-12-03 20:30:53.951710813 +0000 UTC m=+3669.914521119" observedRunningTime="2025-12-03 20:30:54.533240712 +0000 UTC m=+3670.496050988" watchObservedRunningTime="2025-12-03 20:30:54.536438557 +0000 UTC m=+3670.499248823"
Dec 03 20:30:54 crc kubenswrapper[4916]: I1203 20:30:54.807595 4916 scope.go:117] "RemoveContainer" containerID="efe20865ebbda02b6521268314f8b5287ec38531b5af74b5b52eab3283db42f7"
Dec 03 20:30:54 crc kubenswrapper[4916]: I1203 20:30:54.838301 4916 scope.go:117] "RemoveContainer" containerID="24d847fa2195a6bdb3626d0b95631089d41401ef649704591ad523c23b613b44"
Dec 03 20:30:54 crc kubenswrapper[4916]: I1203 20:30:54.879508 4916 scope.go:117] "RemoveContainer" containerID="659bc0e01e60b0673ce5df4b220eebca2920cb02b54472405a0a332e76c5d168"
Dec 03 20:30:58 crc kubenswrapper[4916]: I1203 20:30:58.041688 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-69lwf"]
Dec 03 20:30:58 crc kubenswrapper[4916]: I1203 20:30:58.054558 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-69lwf"]
Dec 03 20:30:58 crc kubenswrapper[4916]: I1203 20:30:58.497497 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="070ffe05-42a7-471d-a027-886ec97d915c" path="/var/lib/kubelet/pods/070ffe05-42a7-471d-a027-886ec97d915c/volumes"
Dec 03 20:31:04 crc kubenswrapper[4916]: I1203 20:31:04.496904 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"
Dec 03 20:31:04 crc kubenswrapper[4916]: E1203 20:31:04.497979 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:31:19 crc kubenswrapper[4916]: I1203 20:31:19.478222 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"
Dec 03 20:31:19 crc kubenswrapper[4916]: E1203 20:31:19.479301 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:31:31 crc kubenswrapper[4916]: I1203 20:31:31.479558 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"
Dec 03 20:31:31 crc kubenswrapper[4916]: E1203 20:31:31.480531 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:31:46 crc kubenswrapper[4916]: I1203 20:31:46.478329 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"
Dec 03 20:31:46 crc kubenswrapper[4916]: E1203 20:31:46.479384 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:31:55 crc kubenswrapper[4916]: I1203 20:31:55.074663 4916 scope.go:117] "RemoveContainer" containerID="307afd2b67d24f67f95001927848f8449eb93f89eca3f711c977872b16529509"
Dec 03 20:31:58 crc kubenswrapper[4916]: I1203 20:31:58.477881 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"
Dec 03 20:31:58 crc kubenswrapper[4916]: E1203 20:31:58.478954 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:32:09 crc kubenswrapper[4916]: I1203 20:32:09.479484 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"
Dec 03 20:32:09 crc kubenswrapper[4916]: E1203 20:32:09.480827 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:32:24 crc kubenswrapper[4916]: I1203 20:32:24.484196 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"
Dec 03 20:32:24 crc kubenswrapper[4916]: E1203 20:32:24.485036 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:32:36 crc kubenswrapper[4916]: I1203 20:32:36.478880 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"
Dec 03 20:32:36 crc kubenswrapper[4916]: E1203 20:32:36.480132 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:32:47 crc kubenswrapper[4916]: I1203 20:32:47.478302 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812"
Dec 03 20:32:47 crc kubenswrapper[4916]: E1203 20:32:47.479078 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:32:47 crc kubenswrapper[4916]: I1203 20:32:47.642179 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_6f4635b6-2410-4d5f-a7c9-3cf0a04739f7/manager/0.log" Dec 03 20:32:51 crc kubenswrapper[4916]: I1203 20:32:51.883770 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:32:51 crc kubenswrapper[4916]: I1203 20:32:51.884828 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="thanos-sidecar" containerID="cri-o://dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9" gracePeriod=600 Dec 03 20:32:51 crc kubenswrapper[4916]: I1203 20:32:51.884935 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="config-reloader" containerID="cri-o://25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528" gracePeriod=600 Dec 03 20:32:51 crc kubenswrapper[4916]: I1203 20:32:51.884483 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="prometheus" containerID="cri-o://62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39" gracePeriod=600 Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.398195 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.534465 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-secret-combined-ca-bundle\") pod \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.534534 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w872x\" (UniqueName: \"kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-kube-api-access-w872x\") pod \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.534605 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-tls-assets\") pod \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.534672 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.534703 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: 
\"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-thanos-prometheus-http-client-file\") pod \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.534735 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config\") pod \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.534756 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.534780 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config-out\") pod \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.534831 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-rulefiles-0\") pod \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.534868 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-db\") pod \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.534900 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config\") pod \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\" (UID: \"84ca7e63-bde0-47e3-a81b-efd7ac93b058\") " Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.536686 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "84ca7e63-bde0-47e3-a81b-efd7ac93b058" (UID: "84ca7e63-bde0-47e3-a81b-efd7ac93b058"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.537722 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-db" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "84ca7e63-bde0-47e3-a81b-efd7ac93b058" (UID: "84ca7e63-bde0-47e3-a81b-efd7ac93b058"). InnerVolumeSpecName "prometheus-metric-storage-db". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.540773 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "84ca7e63-bde0-47e3-a81b-efd7ac93b058" (UID: "84ca7e63-bde0-47e3-a81b-efd7ac93b058"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.541167 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-kube-api-access-w872x" (OuterVolumeSpecName: "kube-api-access-w872x") pod "84ca7e63-bde0-47e3-a81b-efd7ac93b058" (UID: "84ca7e63-bde0-47e3-a81b-efd7ac93b058"). InnerVolumeSpecName "kube-api-access-w872x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.543645 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "84ca7e63-bde0-47e3-a81b-efd7ac93b058" (UID: "84ca7e63-bde0-47e3-a81b-efd7ac93b058"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.544087 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "84ca7e63-bde0-47e3-a81b-efd7ac93b058" (UID: "84ca7e63-bde0-47e3-a81b-efd7ac93b058"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.547318 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config" (OuterVolumeSpecName: "config") pod "84ca7e63-bde0-47e3-a81b-efd7ac93b058" (UID: "84ca7e63-bde0-47e3-a81b-efd7ac93b058"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.547840 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "84ca7e63-bde0-47e3-a81b-efd7ac93b058" (UID: "84ca7e63-bde0-47e3-a81b-efd7ac93b058"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.548116 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config-out" (OuterVolumeSpecName: "config-out") pod "84ca7e63-bde0-47e3-a81b-efd7ac93b058" (UID: "84ca7e63-bde0-47e3-a81b-efd7ac93b058"). InnerVolumeSpecName "config-out". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.549309 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "84ca7e63-bde0-47e3-a81b-efd7ac93b058" (UID: "84ca7e63-bde0-47e3-a81b-efd7ac93b058"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.593476 4916 generic.go:334] "Generic (PLEG): container finished" podID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerID="dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9" exitCode=0 Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.593510 4916 generic.go:334] "Generic (PLEG): container finished" podID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerID="25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528" exitCode=0 Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.593520 4916 generic.go:334] "Generic (PLEG): container finished" podID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerID="62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39" exitCode=0 Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.593539 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"84ca7e63-bde0-47e3-a81b-efd7ac93b058","Type":"ContainerDied","Data":"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9"} Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.593580 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"84ca7e63-bde0-47e3-a81b-efd7ac93b058","Type":"ContainerDied","Data":"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528"} Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.593591 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"84ca7e63-bde0-47e3-a81b-efd7ac93b058","Type":"ContainerDied","Data":"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39"} Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.593603 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"84ca7e63-bde0-47e3-a81b-efd7ac93b058","Type":"ContainerDied","Data":"23f93fd3ed01354b0dbe996a8816f5120a63b34e2b390ef91007a8ad2cedba02"} Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.593621 4916 scope.go:117] "RemoveContainer" containerID="dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.593764 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.629894 4916 scope.go:117] "RemoveContainer" containerID="25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.634645 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config" (OuterVolumeSpecName: "web-config") pod "84ca7e63-bde0-47e3-a81b-efd7ac93b058" (UID: "84ca7e63-bde0-47e3-a81b-efd7ac93b058"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.641699 4916 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.644514 4916 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config\") on node \"crc\" DevicePath \"\"" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.644536 4916 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.644549 4916 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-config-out\") on node \"crc\" DevicePath \"\"" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.644630 4916 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.644778 4916 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/84ca7e63-bde0-47e3-a81b-efd7ac93b058-prometheus-metric-storage-db\") on node \"crc\" DevicePath \"\"" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.644798 4916 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config\") on node \"crc\" DevicePath \"\"" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.644807 4916 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.644816 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w872x\" (UniqueName: \"kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-kube-api-access-w872x\") on node \"crc\" DevicePath \"\"" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.644826 4916 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/84ca7e63-bde0-47e3-a81b-efd7ac93b058-tls-assets\") on node \"crc\" DevicePath \"\"" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.644835 4916 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/84ca7e63-bde0-47e3-a81b-efd7ac93b058-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\"" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.652824 4916 scope.go:117] "RemoveContainer" containerID="62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.675153 4916 scope.go:117] 
"RemoveContainer" containerID="7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.690829 4916 scope.go:117] "RemoveContainer" containerID="dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9" Dec 03 20:32:52 crc kubenswrapper[4916]: E1203 20:32:52.693316 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9\": container with ID starting with dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9 not found: ID does not exist" containerID="dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.693376 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9"} err="failed to get container status \"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9\": rpc error: code = NotFound desc = could not find container \"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9\": container with ID starting with dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9 not found: ID does not exist" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.693402 4916 scope.go:117] "RemoveContainer" containerID="25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528" Dec 03 20:32:52 crc kubenswrapper[4916]: E1203 20:32:52.693704 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528\": container with ID starting with 25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528 not found: ID does not exist" containerID="25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.693744 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528"} err="failed to get container status \"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528\": rpc error: code = NotFound desc = could not find container \"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528\": container with ID starting with 25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528 not found: ID does not exist" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.693757 4916 scope.go:117] "RemoveContainer" containerID="62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39" Dec 03 20:32:52 crc kubenswrapper[4916]: E1203 20:32:52.693983 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39\": container with ID starting with 62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39 not found: ID does not exist" containerID="62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.694008 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39"} err="failed to get container status \"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39\": 
rpc error: code = NotFound desc = could not find container \"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39\": container with ID starting with 62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39 not found: ID does not exist" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.694022 4916 scope.go:117] "RemoveContainer" containerID="7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70" Dec 03 20:32:52 crc kubenswrapper[4916]: E1203 20:32:52.694770 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70\": container with ID starting with 7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70 not found: ID does not exist" containerID="7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.694799 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70"} err="failed to get container status \"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70\": rpc error: code = NotFound desc = could not find container \"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70\": container with ID starting with 7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70 not found: ID does not exist" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.694832 4916 scope.go:117] "RemoveContainer" containerID="dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.695170 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9"} err="failed to get container status \"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9\": rpc error: code = NotFound desc = could not find container \"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9\": container with ID starting with dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9 not found: ID does not exist" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.695206 4916 scope.go:117] "RemoveContainer" containerID="25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.695473 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528"} err="failed to get container status \"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528\": rpc error: code = NotFound desc = could not find container \"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528\": container with ID starting with 25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528 not found: ID does not exist" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.695504 4916 scope.go:117] "RemoveContainer" containerID="62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.695924 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39"} err="failed to get container status \"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39\": 
rpc error: code = NotFound desc = could not find container \"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39\": container with ID starting with 62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39 not found: ID does not exist" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.695948 4916 scope.go:117] "RemoveContainer" containerID="7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.696222 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70"} err="failed to get container status \"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70\": rpc error: code = NotFound desc = could not find container \"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70\": container with ID starting with 7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70 not found: ID does not exist" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.696244 4916 scope.go:117] "RemoveContainer" containerID="dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.696428 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9"} err="failed to get container status \"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9\": rpc error: code = NotFound desc = could not find container \"dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9\": container with ID starting with dda98808f9d6357a8eb3a71cb70fe84ea8e5369429f28ed46c4a7d06e24c1cf9 not found: ID does not exist" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.696447 4916 scope.go:117] "RemoveContainer" containerID="25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.696706 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528"} err="failed to get container status \"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528\": rpc error: code = NotFound desc = could not find container \"25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528\": container with ID starting with 25489c05ae1f4235d5bb1c66908a21b8bc8341bec074392d2362a08949749528 not found: ID does not exist" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.696726 4916 scope.go:117] "RemoveContainer" containerID="62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.697009 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39"} err="failed to get container status \"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39\": rpc error: code = NotFound desc = could not find container \"62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39\": container with ID starting with 62caa56f067c36aead919252c7ba7b41eb7b5e0770ac23274eb7a1707d5a6d39 not found: ID does not exist" Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.697029 4916 scope.go:117] "RemoveContainer" containerID="7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70" Dec 03 20:32:52 crc 
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.697299 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70"} err="failed to get container status \"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70\": rpc error: code = NotFound desc = could not find container \"7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70\": container with ID starting with 7e008c844d0f56d825a5de9afaeab9ea61e27652593dc591f90d408f1081be70 not found: ID does not exist"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.930104 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.939342 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.963368 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 20:32:52 crc kubenswrapper[4916]: E1203 20:32:52.963781 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="prometheus"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.963797 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="prometheus"
Dec 03 20:32:52 crc kubenswrapper[4916]: E1203 20:32:52.963821 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="init-config-reloader"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.963827 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="init-config-reloader"
Dec 03 20:32:52 crc kubenswrapper[4916]: E1203 20:32:52.963838 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="thanos-sidecar"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.963844 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="thanos-sidecar"
Dec 03 20:32:52 crc kubenswrapper[4916]: E1203 20:32:52.963857 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="config-reloader"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.963863 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="config-reloader"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.964020 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="prometheus"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.964038 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="thanos-sidecar"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.964050 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" containerName="config-reloader"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.965735 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.970489 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.970678 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.970827 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-fjxqs"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.971155 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.971538 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.972424 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.983770 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 03 20:32:52 crc kubenswrapper[4916]: I1203 20:32:52.987408 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.051851 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f14e71ef-0642-4eff-b6c8-08454ad04ccf-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.052263 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.052367 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.052527 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.052680 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0"
\"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.052806 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/f14e71ef-0642-4eff-b6c8-08454ad04ccf-prometheus-metric-storage-db\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.052935 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.053052 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-config\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.053173 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f14e71ef-0642-4eff-b6c8-08454ad04ccf-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.053629 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77sqx\" (UniqueName: \"kubernetes.io/projected/f14e71ef-0642-4eff-b6c8-08454ad04ccf-kube-api-access-77sqx\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.053766 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f14e71ef-0642-4eff-b6c8-08454ad04ccf-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.155795 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/f14e71ef-0642-4eff-b6c8-08454ad04ccf-prometheus-metric-storage-db\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.156051 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: 
\"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.156163 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-config\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.156270 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f14e71ef-0642-4eff-b6c8-08454ad04ccf-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.156621 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77sqx\" (UniqueName: \"kubernetes.io/projected/f14e71ef-0642-4eff-b6c8-08454ad04ccf-kube-api-access-77sqx\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.156763 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f14e71ef-0642-4eff-b6c8-08454ad04ccf-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.156918 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f14e71ef-0642-4eff-b6c8-08454ad04ccf-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.157025 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.157136 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.157274 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.157384 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: 
\"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.157493 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/f14e71ef-0642-4eff-b6c8-08454ad04ccf-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.156664 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/f14e71ef-0642-4eff-b6c8-08454ad04ccf-prometheus-metric-storage-db\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.159327 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f14e71ef-0642-4eff-b6c8-08454ad04ccf-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.159959 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.161065 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f14e71ef-0642-4eff-b6c8-08454ad04ccf-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.161370 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.161465 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.162314 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " 
pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.162634 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.168538 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f14e71ef-0642-4eff-b6c8-08454ad04ccf-config\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.176279 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77sqx\" (UniqueName: \"kubernetes.io/projected/f14e71ef-0642-4eff-b6c8-08454ad04ccf-kube-api-access-77sqx\") pod \"prometheus-metric-storage-0\" (UID: \"f14e71ef-0642-4eff-b6c8-08454ad04ccf\") " pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:53 crc kubenswrapper[4916]: I1203 20:32:53.348353 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 20:32:54 crc kubenswrapper[4916]: I1203 20:32:54.180338 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 20:32:54 crc kubenswrapper[4916]: I1203 20:32:54.491019 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84ca7e63-bde0-47e3-a81b-efd7ac93b058" path="/var/lib/kubelet/pods/84ca7e63-bde0-47e3-a81b-efd7ac93b058/volumes" Dec 03 20:32:54 crc kubenswrapper[4916]: I1203 20:32:54.611770 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f14e71ef-0642-4eff-b6c8-08454ad04ccf","Type":"ContainerStarted","Data":"8c7a505d188ec9dc0e562824529554bb6b7461fb9f550c23232fbc6f91d22831"} Dec 03 20:32:58 crc kubenswrapper[4916]: I1203 20:32:58.676557 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f14e71ef-0642-4eff-b6c8-08454ad04ccf","Type":"ContainerStarted","Data":"bfcaa128c18b626e3bd21956002113880142af9acdd33bfc1166e3aa837c2eea"} Dec 03 20:33:02 crc kubenswrapper[4916]: I1203 20:33:02.477851 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:33:02 crc kubenswrapper[4916]: E1203 20:33:02.478662 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:33:08 crc kubenswrapper[4916]: I1203 20:33:08.805331 4916 generic.go:334] "Generic (PLEG): container finished" podID="f14e71ef-0642-4eff-b6c8-08454ad04ccf" containerID="bfcaa128c18b626e3bd21956002113880142af9acdd33bfc1166e3aa837c2eea" exitCode=0 Dec 03 20:33:08 crc kubenswrapper[4916]: I1203 20:33:08.805437 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" 
event={"ID":"f14e71ef-0642-4eff-b6c8-08454ad04ccf","Type":"ContainerDied","Data":"bfcaa128c18b626e3bd21956002113880142af9acdd33bfc1166e3aa837c2eea"} Dec 03 20:33:09 crc kubenswrapper[4916]: I1203 20:33:09.827936 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f14e71ef-0642-4eff-b6c8-08454ad04ccf","Type":"ContainerStarted","Data":"eae982f7a01dcb75fc14cfe6709ea483cfb3a80d38a60abcb0d778b3c8c318cd"} Dec 03 20:33:13 crc kubenswrapper[4916]: I1203 20:33:13.478019 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:33:13 crc kubenswrapper[4916]: E1203 20:33:13.478834 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:33:15 crc kubenswrapper[4916]: I1203 20:33:15.895078 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f14e71ef-0642-4eff-b6c8-08454ad04ccf","Type":"ContainerStarted","Data":"3d47a05d44478d5508cfe912b9ae5b9ad7f3b3c956eccf890a4b4b9e4f4e06eb"} Dec 03 20:33:15 crc kubenswrapper[4916]: I1203 20:33:15.896049 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"f14e71ef-0642-4eff-b6c8-08454ad04ccf","Type":"ContainerStarted","Data":"a7ce0318e52f1dbb6b1189beae4384715050d372f884438d97db6d45116241bd"} Dec 03 20:33:15 crc kubenswrapper[4916]: I1203 20:33:15.942935 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=23.942908504000002 podStartE2EDuration="23.942908504s" podCreationTimestamp="2025-12-03 20:32:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:33:15.933614637 +0000 UTC m=+3811.896424943" watchObservedRunningTime="2025-12-03 20:33:15.942908504 +0000 UTC m=+3811.905718810" Dec 03 20:33:18 crc kubenswrapper[4916]: I1203 20:33:18.348931 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 03 20:33:23 crc kubenswrapper[4916]: I1203 20:33:23.349066 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 03 20:33:23 crc kubenswrapper[4916]: I1203 20:33:23.358897 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 03 20:33:24 crc kubenswrapper[4916]: I1203 20:33:24.048790 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 03 20:33:28 crc kubenswrapper[4916]: I1203 20:33:28.478952 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:33:28 crc kubenswrapper[4916]: E1203 20:33:28.481016 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:33:39 crc kubenswrapper[4916]: I1203 20:33:39.478544 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:33:39 crc kubenswrapper[4916]: E1203 20:33:39.479663 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:33:50 crc kubenswrapper[4916]: I1203 20:33:50.478222 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:33:50 crc kubenswrapper[4916]: E1203 20:33:50.479161 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:34:03 crc kubenswrapper[4916]: I1203 20:34:03.478530 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:34:03 crc kubenswrapper[4916]: E1203 20:34:03.479783 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:34:17 crc kubenswrapper[4916]: I1203 20:34:17.478890 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:34:17 crc kubenswrapper[4916]: E1203 20:34:17.479856 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:34:32 crc kubenswrapper[4916]: I1203 20:34:32.478079 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:34:32 crc kubenswrapper[4916]: E1203 20:34:32.479238 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" 
podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:34:47 crc kubenswrapper[4916]: I1203 20:34:47.478652 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:34:48 crc kubenswrapper[4916]: I1203 20:34:48.065856 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"280086a6a26131e2928ec85e46e947327e0eccc4ecf2b5480e980dd302192d99"} Dec 03 20:34:51 crc kubenswrapper[4916]: I1203 20:34:51.773475 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_6f4635b6-2410-4d5f-a7c9-3cf0a04739f7/manager/0.log" Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.434550 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6wjnk/must-gather-lh6tb"] Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.436799 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6wjnk/must-gather-lh6tb" Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.438861 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-6wjnk"/"default-dockercfg-dkvzj" Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.439044 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-6wjnk"/"kube-root-ca.crt" Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.439272 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-6wjnk"/"openshift-service-ca.crt" Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.446341 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-6wjnk/must-gather-lh6tb"] Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.549466 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1d5eccb0-079e-4d88-afc5-738abcbc8c62-must-gather-output\") pod \"must-gather-lh6tb\" (UID: \"1d5eccb0-079e-4d88-afc5-738abcbc8c62\") " pod="openshift-must-gather-6wjnk/must-gather-lh6tb" Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.549746 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhvwm\" (UniqueName: \"kubernetes.io/projected/1d5eccb0-079e-4d88-afc5-738abcbc8c62-kube-api-access-lhvwm\") pod \"must-gather-lh6tb\" (UID: \"1d5eccb0-079e-4d88-afc5-738abcbc8c62\") " pod="openshift-must-gather-6wjnk/must-gather-lh6tb" Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.651642 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1d5eccb0-079e-4d88-afc5-738abcbc8c62-must-gather-output\") pod \"must-gather-lh6tb\" (UID: \"1d5eccb0-079e-4d88-afc5-738abcbc8c62\") " pod="openshift-must-gather-6wjnk/must-gather-lh6tb" Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.651729 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhvwm\" (UniqueName: \"kubernetes.io/projected/1d5eccb0-079e-4d88-afc5-738abcbc8c62-kube-api-access-lhvwm\") pod \"must-gather-lh6tb\" (UID: \"1d5eccb0-079e-4d88-afc5-738abcbc8c62\") " pod="openshift-must-gather-6wjnk/must-gather-lh6tb" Dec 03 20:35:11 crc 
kubenswrapper[4916]: I1203 20:35:11.652280 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1d5eccb0-079e-4d88-afc5-738abcbc8c62-must-gather-output\") pod \"must-gather-lh6tb\" (UID: \"1d5eccb0-079e-4d88-afc5-738abcbc8c62\") " pod="openshift-must-gather-6wjnk/must-gather-lh6tb" Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.676676 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhvwm\" (UniqueName: \"kubernetes.io/projected/1d5eccb0-079e-4d88-afc5-738abcbc8c62-kube-api-access-lhvwm\") pod \"must-gather-lh6tb\" (UID: \"1d5eccb0-079e-4d88-afc5-738abcbc8c62\") " pod="openshift-must-gather-6wjnk/must-gather-lh6tb" Dec 03 20:35:11 crc kubenswrapper[4916]: I1203 20:35:11.753988 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6wjnk/must-gather-lh6tb" Dec 03 20:35:12 crc kubenswrapper[4916]: I1203 20:35:12.234906 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-6wjnk/must-gather-lh6tb"] Dec 03 20:35:12 crc kubenswrapper[4916]: I1203 20:35:12.350371 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6wjnk/must-gather-lh6tb" event={"ID":"1d5eccb0-079e-4d88-afc5-738abcbc8c62","Type":"ContainerStarted","Data":"a36c98d3a236e55a7fef2bd702fd3ee221b926fa9367a9cc4eac786dc2e92f8e"} Dec 03 20:35:21 crc kubenswrapper[4916]: I1203 20:35:21.468810 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6wjnk/must-gather-lh6tb" event={"ID":"1d5eccb0-079e-4d88-afc5-738abcbc8c62","Type":"ContainerStarted","Data":"66925e79d95bb9cb4f65928fd21f248206b947fc86af1cfe49a071ab48e04b87"} Dec 03 20:35:22 crc kubenswrapper[4916]: I1203 20:35:22.491219 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6wjnk/must-gather-lh6tb" event={"ID":"1d5eccb0-079e-4d88-afc5-738abcbc8c62","Type":"ContainerStarted","Data":"23deeb95c674daa5803b20bfa729fd730bb142593848ccd87e1cbcc3a7ee59ef"} Dec 03 20:35:22 crc kubenswrapper[4916]: I1203 20:35:22.515069 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-6wjnk/must-gather-lh6tb" podStartSLOduration=3.480610214 podStartE2EDuration="11.515046249s" podCreationTimestamp="2025-12-03 20:35:11 +0000 UTC" firstStartedPulling="2025-12-03 20:35:12.252523069 +0000 UTC m=+3928.215333335" lastFinishedPulling="2025-12-03 20:35:20.286959104 +0000 UTC m=+3936.249769370" observedRunningTime="2025-12-03 20:35:22.502257978 +0000 UTC m=+3938.465068254" watchObservedRunningTime="2025-12-03 20:35:22.515046249 +0000 UTC m=+3938.477856515" Dec 03 20:35:25 crc kubenswrapper[4916]: E1203 20:35:25.360553 4916 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.175:55846->38.102.83.175:36291: write tcp 38.102.83.175:55846->38.102.83.175:36291: write: broken pipe Dec 03 20:35:25 crc kubenswrapper[4916]: I1203 20:35:25.930362 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6wjnk/crc-debug-7qljn"] Dec 03 20:35:25 crc kubenswrapper[4916]: I1203 20:35:25.931639 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6wjnk/crc-debug-7qljn" Dec 03 20:35:26 crc kubenswrapper[4916]: I1203 20:35:26.147292 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xszh4\" (UniqueName: \"kubernetes.io/projected/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-kube-api-access-xszh4\") pod \"crc-debug-7qljn\" (UID: \"a9439e43-2e9c-482e-ba29-8b1e6ea13c73\") " pod="openshift-must-gather-6wjnk/crc-debug-7qljn" Dec 03 20:35:26 crc kubenswrapper[4916]: I1203 20:35:26.147420 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-host\") pod \"crc-debug-7qljn\" (UID: \"a9439e43-2e9c-482e-ba29-8b1e6ea13c73\") " pod="openshift-must-gather-6wjnk/crc-debug-7qljn" Dec 03 20:35:26 crc kubenswrapper[4916]: I1203 20:35:26.249183 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xszh4\" (UniqueName: \"kubernetes.io/projected/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-kube-api-access-xszh4\") pod \"crc-debug-7qljn\" (UID: \"a9439e43-2e9c-482e-ba29-8b1e6ea13c73\") " pod="openshift-must-gather-6wjnk/crc-debug-7qljn" Dec 03 20:35:26 crc kubenswrapper[4916]: I1203 20:35:26.249555 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-host\") pod \"crc-debug-7qljn\" (UID: \"a9439e43-2e9c-482e-ba29-8b1e6ea13c73\") " pod="openshift-must-gather-6wjnk/crc-debug-7qljn" Dec 03 20:35:26 crc kubenswrapper[4916]: I1203 20:35:26.249777 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-host\") pod \"crc-debug-7qljn\" (UID: \"a9439e43-2e9c-482e-ba29-8b1e6ea13c73\") " pod="openshift-must-gather-6wjnk/crc-debug-7qljn" Dec 03 20:35:26 crc kubenswrapper[4916]: I1203 20:35:26.294802 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xszh4\" (UniqueName: \"kubernetes.io/projected/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-kube-api-access-xszh4\") pod \"crc-debug-7qljn\" (UID: \"a9439e43-2e9c-482e-ba29-8b1e6ea13c73\") " pod="openshift-must-gather-6wjnk/crc-debug-7qljn" Dec 03 20:35:26 crc kubenswrapper[4916]: I1203 20:35:26.550631 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6wjnk/crc-debug-7qljn" Dec 03 20:35:27 crc kubenswrapper[4916]: I1203 20:35:27.533850 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6wjnk/crc-debug-7qljn" event={"ID":"a9439e43-2e9c-482e-ba29-8b1e6ea13c73","Type":"ContainerStarted","Data":"78604bc6cce01a762628630431dd516bd1eed3e6af73a375d18d6533f0b5d00f"} Dec 03 20:35:38 crc kubenswrapper[4916]: I1203 20:35:38.629921 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6wjnk/crc-debug-7qljn" event={"ID":"a9439e43-2e9c-482e-ba29-8b1e6ea13c73","Type":"ContainerStarted","Data":"bb35a8825ba9a92ce05a5ec5d23fb3763c210582553c6aadfd1908bd12a5f34b"} Dec 03 20:35:38 crc kubenswrapper[4916]: I1203 20:35:38.648087 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-6wjnk/crc-debug-7qljn" podStartSLOduration=2.437251626 podStartE2EDuration="13.648065668s" podCreationTimestamp="2025-12-03 20:35:25 +0000 UTC" firstStartedPulling="2025-12-03 20:35:26.609683839 +0000 UTC m=+3942.572494105" lastFinishedPulling="2025-12-03 20:35:37.820497841 +0000 UTC m=+3953.783308147" observedRunningTime="2025-12-03 20:35:38.643659052 +0000 UTC m=+3954.606469338" watchObservedRunningTime="2025-12-03 20:35:38.648065668 +0000 UTC m=+3954.610875944" Dec 03 20:35:54 crc kubenswrapper[4916]: I1203 20:35:54.790354 4916 generic.go:334] "Generic (PLEG): container finished" podID="a9439e43-2e9c-482e-ba29-8b1e6ea13c73" containerID="bb35a8825ba9a92ce05a5ec5d23fb3763c210582553c6aadfd1908bd12a5f34b" exitCode=0 Dec 03 20:35:54 crc kubenswrapper[4916]: I1203 20:35:54.790438 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6wjnk/crc-debug-7qljn" event={"ID":"a9439e43-2e9c-482e-ba29-8b1e6ea13c73","Type":"ContainerDied","Data":"bb35a8825ba9a92ce05a5ec5d23fb3763c210582553c6aadfd1908bd12a5f34b"} Dec 03 20:35:55 crc kubenswrapper[4916]: I1203 20:35:55.921677 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6wjnk/crc-debug-7qljn" Dec 03 20:35:55 crc kubenswrapper[4916]: I1203 20:35:55.959139 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-host\") pod \"a9439e43-2e9c-482e-ba29-8b1e6ea13c73\" (UID: \"a9439e43-2e9c-482e-ba29-8b1e6ea13c73\") " Dec 03 20:35:55 crc kubenswrapper[4916]: I1203 20:35:55.959297 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-host" (OuterVolumeSpecName: "host") pod "a9439e43-2e9c-482e-ba29-8b1e6ea13c73" (UID: "a9439e43-2e9c-482e-ba29-8b1e6ea13c73"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 20:35:55 crc kubenswrapper[4916]: I1203 20:35:55.959323 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xszh4\" (UniqueName: \"kubernetes.io/projected/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-kube-api-access-xszh4\") pod \"a9439e43-2e9c-482e-ba29-8b1e6ea13c73\" (UID: \"a9439e43-2e9c-482e-ba29-8b1e6ea13c73\") " Dec 03 20:35:55 crc kubenswrapper[4916]: I1203 20:35:55.960493 4916 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-host\") on node \"crc\" DevicePath \"\"" Dec 03 20:35:55 crc kubenswrapper[4916]: I1203 20:35:55.962287 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6wjnk/crc-debug-7qljn"] Dec 03 20:35:55 crc kubenswrapper[4916]: I1203 20:35:55.971503 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6wjnk/crc-debug-7qljn"] Dec 03 20:35:55 crc kubenswrapper[4916]: I1203 20:35:55.974014 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-kube-api-access-xszh4" (OuterVolumeSpecName: "kube-api-access-xszh4") pod "a9439e43-2e9c-482e-ba29-8b1e6ea13c73" (UID: "a9439e43-2e9c-482e-ba29-8b1e6ea13c73"). InnerVolumeSpecName "kube-api-access-xszh4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:35:56 crc kubenswrapper[4916]: I1203 20:35:56.062494 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xszh4\" (UniqueName: \"kubernetes.io/projected/a9439e43-2e9c-482e-ba29-8b1e6ea13c73-kube-api-access-xszh4\") on node \"crc\" DevicePath \"\"" Dec 03 20:35:56 crc kubenswrapper[4916]: I1203 20:35:56.497836 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9439e43-2e9c-482e-ba29-8b1e6ea13c73" path="/var/lib/kubelet/pods/a9439e43-2e9c-482e-ba29-8b1e6ea13c73/volumes" Dec 03 20:35:56 crc kubenswrapper[4916]: I1203 20:35:56.807144 4916 scope.go:117] "RemoveContainer" containerID="bb35a8825ba9a92ce05a5ec5d23fb3763c210582553c6aadfd1908bd12a5f34b" Dec 03 20:35:56 crc kubenswrapper[4916]: I1203 20:35:56.807252 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6wjnk/crc-debug-7qljn" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.277343 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6wjnk/crc-debug-tdnl9"] Dec 03 20:35:57 crc kubenswrapper[4916]: E1203 20:35:57.277927 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9439e43-2e9c-482e-ba29-8b1e6ea13c73" containerName="container-00" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.277939 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9439e43-2e9c-482e-ba29-8b1e6ea13c73" containerName="container-00" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.278134 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9439e43-2e9c-482e-ba29-8b1e6ea13c73" containerName="container-00" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.278806 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.387217 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a962cff-11da-4009-9cbb-b947fe068d60-host\") pod \"crc-debug-tdnl9\" (UID: \"6a962cff-11da-4009-9cbb-b947fe068d60\") " pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.387287 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wkvx\" (UniqueName: \"kubernetes.io/projected/6a962cff-11da-4009-9cbb-b947fe068d60-kube-api-access-6wkvx\") pod \"crc-debug-tdnl9\" (UID: \"6a962cff-11da-4009-9cbb-b947fe068d60\") " pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.489017 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wkvx\" (UniqueName: \"kubernetes.io/projected/6a962cff-11da-4009-9cbb-b947fe068d60-kube-api-access-6wkvx\") pod \"crc-debug-tdnl9\" (UID: \"6a962cff-11da-4009-9cbb-b947fe068d60\") " pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.489183 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a962cff-11da-4009-9cbb-b947fe068d60-host\") pod \"crc-debug-tdnl9\" (UID: \"6a962cff-11da-4009-9cbb-b947fe068d60\") " pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.489304 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a962cff-11da-4009-9cbb-b947fe068d60-host\") pod \"crc-debug-tdnl9\" (UID: \"6a962cff-11da-4009-9cbb-b947fe068d60\") " pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.506213 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wkvx\" (UniqueName: \"kubernetes.io/projected/6a962cff-11da-4009-9cbb-b947fe068d60-kube-api-access-6wkvx\") pod \"crc-debug-tdnl9\" (UID: \"6a962cff-11da-4009-9cbb-b947fe068d60\") " pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.593890 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" Dec 03 20:35:57 crc kubenswrapper[4916]: I1203 20:35:57.817124 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" event={"ID":"6a962cff-11da-4009-9cbb-b947fe068d60","Type":"ContainerStarted","Data":"4d344e3a53bf04ce8ea06b31ef31a83f2248af9e9c4ce3d7d2116c56ffb49b5f"} Dec 03 20:35:58 crc kubenswrapper[4916]: I1203 20:35:58.829599 4916 generic.go:334] "Generic (PLEG): container finished" podID="6a962cff-11da-4009-9cbb-b947fe068d60" containerID="14d685187818a87cf044054245d0a192c5d2333ac9e6c3b7da4f17ae33b40781" exitCode=1 Dec 03 20:35:58 crc kubenswrapper[4916]: I1203 20:35:58.829697 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" event={"ID":"6a962cff-11da-4009-9cbb-b947fe068d60","Type":"ContainerDied","Data":"14d685187818a87cf044054245d0a192c5d2333ac9e6c3b7da4f17ae33b40781"} Dec 03 20:35:58 crc kubenswrapper[4916]: I1203 20:35:58.869121 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6wjnk/crc-debug-tdnl9"] Dec 03 20:35:58 crc kubenswrapper[4916]: I1203 20:35:58.907398 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6wjnk/crc-debug-tdnl9"] Dec 03 20:35:59 crc kubenswrapper[4916]: I1203 20:35:59.947645 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" Dec 03 20:36:00 crc kubenswrapper[4916]: I1203 20:36:00.037210 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a962cff-11da-4009-9cbb-b947fe068d60-host\") pod \"6a962cff-11da-4009-9cbb-b947fe068d60\" (UID: \"6a962cff-11da-4009-9cbb-b947fe068d60\") " Dec 03 20:36:00 crc kubenswrapper[4916]: I1203 20:36:00.037331 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a962cff-11da-4009-9cbb-b947fe068d60-host" (OuterVolumeSpecName: "host") pod "6a962cff-11da-4009-9cbb-b947fe068d60" (UID: "6a962cff-11da-4009-9cbb-b947fe068d60"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 20:36:00 crc kubenswrapper[4916]: I1203 20:36:00.037580 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wkvx\" (UniqueName: \"kubernetes.io/projected/6a962cff-11da-4009-9cbb-b947fe068d60-kube-api-access-6wkvx\") pod \"6a962cff-11da-4009-9cbb-b947fe068d60\" (UID: \"6a962cff-11da-4009-9cbb-b947fe068d60\") " Dec 03 20:36:00 crc kubenswrapper[4916]: I1203 20:36:00.038610 4916 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a962cff-11da-4009-9cbb-b947fe068d60-host\") on node \"crc\" DevicePath \"\"" Dec 03 20:36:00 crc kubenswrapper[4916]: I1203 20:36:00.043834 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a962cff-11da-4009-9cbb-b947fe068d60-kube-api-access-6wkvx" (OuterVolumeSpecName: "kube-api-access-6wkvx") pod "6a962cff-11da-4009-9cbb-b947fe068d60" (UID: "6a962cff-11da-4009-9cbb-b947fe068d60"). InnerVolumeSpecName "kube-api-access-6wkvx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:36:00 crc kubenswrapper[4916]: I1203 20:36:00.140818 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wkvx\" (UniqueName: \"kubernetes.io/projected/6a962cff-11da-4009-9cbb-b947fe068d60-kube-api-access-6wkvx\") on node \"crc\" DevicePath \"\"" Dec 03 20:36:00 crc kubenswrapper[4916]: I1203 20:36:00.504316 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a962cff-11da-4009-9cbb-b947fe068d60" path="/var/lib/kubelet/pods/6a962cff-11da-4009-9cbb-b947fe068d60/volumes" Dec 03 20:36:00 crc kubenswrapper[4916]: I1203 20:36:00.851853 4916 scope.go:117] "RemoveContainer" containerID="14d685187818a87cf044054245d0a192c5d2333ac9e6c3b7da4f17ae33b40781" Dec 03 20:36:00 crc kubenswrapper[4916]: I1203 20:36:00.851897 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6wjnk/crc-debug-tdnl9" Dec 03 20:36:37 crc kubenswrapper[4916]: I1203 20:36:37.994549 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mrdfw"] Dec 03 20:36:37 crc kubenswrapper[4916]: E1203 20:36:37.995838 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a962cff-11da-4009-9cbb-b947fe068d60" containerName="container-00" Dec 03 20:36:37 crc kubenswrapper[4916]: I1203 20:36:37.995860 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a962cff-11da-4009-9cbb-b947fe068d60" containerName="container-00" Dec 03 20:36:37 crc kubenswrapper[4916]: I1203 20:36:37.996202 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a962cff-11da-4009-9cbb-b947fe068d60" containerName="container-00" Dec 03 20:36:37 crc kubenswrapper[4916]: I1203 20:36:37.999141 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.020783 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mrdfw"] Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.066895 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-catalog-content\") pod \"redhat-operators-mrdfw\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.067031 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm6gh\" (UniqueName: \"kubernetes.io/projected/5352f1bc-6a00-45f6-9917-1bbedf6caad8-kube-api-access-rm6gh\") pod \"redhat-operators-mrdfw\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.067159 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-utilities\") pod \"redhat-operators-mrdfw\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.168541 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm6gh\" (UniqueName: \"kubernetes.io/projected/5352f1bc-6a00-45f6-9917-1bbedf6caad8-kube-api-access-rm6gh\") pod \"redhat-operators-mrdfw\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.168812 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-utilities\") pod \"redhat-operators-mrdfw\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.168862 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-catalog-content\") pod \"redhat-operators-mrdfw\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.169876 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-catalog-content\") pod \"redhat-operators-mrdfw\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.170837 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-utilities\") pod \"redhat-operators-mrdfw\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.193084 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rm6gh\" (UniqueName: \"kubernetes.io/projected/5352f1bc-6a00-45f6-9917-1bbedf6caad8-kube-api-access-rm6gh\") pod \"redhat-operators-mrdfw\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.331272 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:38 crc kubenswrapper[4916]: I1203 20:36:38.848121 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mrdfw"] Dec 03 20:36:38 crc kubenswrapper[4916]: W1203 20:36:38.854884 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5352f1bc_6a00_45f6_9917_1bbedf6caad8.slice/crio-58a3bf93f7b9064090b8c915cac5f84d0701430bf750d7e6e1df0d21d4b80166 WatchSource:0}: Error finding container 58a3bf93f7b9064090b8c915cac5f84d0701430bf750d7e6e1df0d21d4b80166: Status 404 returned error can't find the container with id 58a3bf93f7b9064090b8c915cac5f84d0701430bf750d7e6e1df0d21d4b80166 Dec 03 20:36:39 crc kubenswrapper[4916]: I1203 20:36:39.523883 4916 generic.go:334] "Generic (PLEG): container finished" podID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerID="4998c11dad2df2a515d7430e8c24b2921f0cbe600e4798941b9136978d421bfe" exitCode=0 Dec 03 20:36:39 crc kubenswrapper[4916]: I1203 20:36:39.523922 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrdfw" event={"ID":"5352f1bc-6a00-45f6-9917-1bbedf6caad8","Type":"ContainerDied","Data":"4998c11dad2df2a515d7430e8c24b2921f0cbe600e4798941b9136978d421bfe"} Dec 03 20:36:39 crc kubenswrapper[4916]: I1203 20:36:39.523946 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrdfw" event={"ID":"5352f1bc-6a00-45f6-9917-1bbedf6caad8","Type":"ContainerStarted","Data":"58a3bf93f7b9064090b8c915cac5f84d0701430bf750d7e6e1df0d21d4b80166"} Dec 03 20:36:39 crc kubenswrapper[4916]: I1203 20:36:39.525739 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 20:36:41 crc kubenswrapper[4916]: I1203 20:36:41.540408 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrdfw" event={"ID":"5352f1bc-6a00-45f6-9917-1bbedf6caad8","Type":"ContainerStarted","Data":"cb0706dcba030a28779aff63b6c88d9e8b69246d1427b609ddaaa8980ada5bd8"} Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.771345 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9lmc2"] Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.775216 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9lmc2"] Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.775340 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.860492 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gkcd\" (UniqueName: \"kubernetes.io/projected/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-kube-api-access-9gkcd\") pod \"community-operators-9lmc2\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.860550 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-utilities\") pod \"community-operators-9lmc2\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.860589 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-catalog-content\") pod \"community-operators-9lmc2\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.962908 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gkcd\" (UniqueName: \"kubernetes.io/projected/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-kube-api-access-9gkcd\") pod \"community-operators-9lmc2\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.962962 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-utilities\") pod \"community-operators-9lmc2\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.962983 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-catalog-content\") pod \"community-operators-9lmc2\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.963504 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-catalog-content\") pod \"community-operators-9lmc2\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.963723 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-utilities\") pod \"community-operators-9lmc2\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:42 crc kubenswrapper[4916]: I1203 20:36:42.994711 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gkcd\" (UniqueName: \"kubernetes.io/projected/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-kube-api-access-9gkcd\") pod 
\"community-operators-9lmc2\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:43 crc kubenswrapper[4916]: I1203 20:36:43.115677 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:43 crc kubenswrapper[4916]: I1203 20:36:43.677811 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9lmc2"] Dec 03 20:36:43 crc kubenswrapper[4916]: W1203 20:36:43.683144 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda55ea7b3_f0a8_44ef_86f6_e5c4fe44e0c8.slice/crio-b4f822f9e29687352e3c85fe49aa0459170937e5c24ab77d8ae3eba935e30c9e WatchSource:0}: Error finding container b4f822f9e29687352e3c85fe49aa0459170937e5c24ab77d8ae3eba935e30c9e: Status 404 returned error can't find the container with id b4f822f9e29687352e3c85fe49aa0459170937e5c24ab77d8ae3eba935e30c9e Dec 03 20:36:44 crc kubenswrapper[4916]: I1203 20:36:44.574023 4916 generic.go:334] "Generic (PLEG): container finished" podID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerID="cb0706dcba030a28779aff63b6c88d9e8b69246d1427b609ddaaa8980ada5bd8" exitCode=0 Dec 03 20:36:44 crc kubenswrapper[4916]: I1203 20:36:44.574116 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrdfw" event={"ID":"5352f1bc-6a00-45f6-9917-1bbedf6caad8","Type":"ContainerDied","Data":"cb0706dcba030a28779aff63b6c88d9e8b69246d1427b609ddaaa8980ada5bd8"} Dec 03 20:36:44 crc kubenswrapper[4916]: I1203 20:36:44.576473 4916 generic.go:334] "Generic (PLEG): container finished" podID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" containerID="76286afb030e40f99d26fd519287aa74bb23ba05ebfa165023d7b4eab6f7580a" exitCode=0 Dec 03 20:36:44 crc kubenswrapper[4916]: I1203 20:36:44.576504 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lmc2" event={"ID":"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8","Type":"ContainerDied","Data":"76286afb030e40f99d26fd519287aa74bb23ba05ebfa165023d7b4eab6f7580a"} Dec 03 20:36:44 crc kubenswrapper[4916]: I1203 20:36:44.576531 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lmc2" event={"ID":"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8","Type":"ContainerStarted","Data":"b4f822f9e29687352e3c85fe49aa0459170937e5c24ab77d8ae3eba935e30c9e"} Dec 03 20:36:45 crc kubenswrapper[4916]: I1203 20:36:45.586273 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lmc2" event={"ID":"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8","Type":"ContainerStarted","Data":"27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce"} Dec 03 20:36:45 crc kubenswrapper[4916]: I1203 20:36:45.590128 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrdfw" event={"ID":"5352f1bc-6a00-45f6-9917-1bbedf6caad8","Type":"ContainerStarted","Data":"cf9a53fe33028e8edf2f844b49b12a71bb8a2fe28beaf6a35b9df0ff725c4ec0"} Dec 03 20:36:45 crc kubenswrapper[4916]: I1203 20:36:45.622557 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mrdfw" podStartSLOduration=3.079156887 podStartE2EDuration="8.622541194s" podCreationTimestamp="2025-12-03 20:36:37 +0000 UTC" firstStartedPulling="2025-12-03 20:36:39.52549556 +0000 UTC m=+4015.488305826" 
lastFinishedPulling="2025-12-03 20:36:45.068879867 +0000 UTC m=+4021.031690133" observedRunningTime="2025-12-03 20:36:45.619114054 +0000 UTC m=+4021.581924320" watchObservedRunningTime="2025-12-03 20:36:45.622541194 +0000 UTC m=+4021.585351460" Dec 03 20:36:45 crc kubenswrapper[4916]: I1203 20:36:45.729099 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_aa884253-05a5-47e4-a258-d95aab45bb36/alertmanager/0.log" Dec 03 20:36:45 crc kubenswrapper[4916]: I1203 20:36:45.733609 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_aa884253-05a5-47e4-a258-d95aab45bb36/init-config-reloader/0.log" Dec 03 20:36:45 crc kubenswrapper[4916]: I1203 20:36:45.755185 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_aa884253-05a5-47e4-a258-d95aab45bb36/init-config-reloader/0.log" Dec 03 20:36:45 crc kubenswrapper[4916]: I1203 20:36:45.803235 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_aa884253-05a5-47e4-a258-d95aab45bb36/config-reloader/0.log" Dec 03 20:36:45 crc kubenswrapper[4916]: I1203 20:36:45.961996 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_2002a42d-6cf6-42dd-99b1-df69ff30bc53/aodh-api/0.log" Dec 03 20:36:46 crc kubenswrapper[4916]: I1203 20:36:46.039197 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_2002a42d-6cf6-42dd-99b1-df69ff30bc53/aodh-listener/0.log" Dec 03 20:36:46 crc kubenswrapper[4916]: I1203 20:36:46.074423 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_2002a42d-6cf6-42dd-99b1-df69ff30bc53/aodh-evaluator/0.log" Dec 03 20:36:46 crc kubenswrapper[4916]: I1203 20:36:46.176139 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_2002a42d-6cf6-42dd-99b1-df69ff30bc53/aodh-notifier/0.log" Dec 03 20:36:46 crc kubenswrapper[4916]: I1203 20:36:46.295823 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-f787c8578-2cjjd_eda087f8-dbb4-47ca-a210-576abc73a55e/barbican-api/0.log" Dec 03 20:36:46 crc kubenswrapper[4916]: I1203 20:36:46.347059 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-f787c8578-2cjjd_eda087f8-dbb4-47ca-a210-576abc73a55e/barbican-api-log/0.log" Dec 03 20:36:46 crc kubenswrapper[4916]: I1203 20:36:46.528530 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7755d7d784-5sk27_c3d699e4-f5e8-4719-bc16-b5a85bcaa695/barbican-keystone-listener/0.log" Dec 03 20:36:46 crc kubenswrapper[4916]: I1203 20:36:46.882511 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7755d7d784-5sk27_c3d699e4-f5e8-4719-bc16-b5a85bcaa695/barbican-keystone-listener-log/0.log" Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.021966 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c644b9d95-mhwlb_b33d04f2-ecc8-4c07-b258-60918f9aff05/barbican-worker-log/0.log" Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.028128 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c644b9d95-mhwlb_b33d04f2-ecc8-4c07-b258-60918f9aff05/barbican-worker/0.log" Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.133389 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp_93e63900-68b9-4c76-b614-78dcd0862645/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.295649 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6f01d12e-f1c3-4da2-b3bc-31623e4a2493/ceilometer-central-agent/0.log" Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.367598 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6f01d12e-f1c3-4da2-b3bc-31623e4a2493/ceilometer-notification-agent/0.log" Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.452106 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6f01d12e-f1c3-4da2-b3bc-31623e4a2493/proxy-httpd/0.log" Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.510502 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6f01d12e-f1c3-4da2-b3bc-31623e4a2493/sg-core/0.log" Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.610493 4916 generic.go:334] "Generic (PLEG): container finished" podID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" containerID="27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce" exitCode=0 Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.610540 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lmc2" event={"ID":"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8","Type":"ContainerDied","Data":"27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce"} Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.661946 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_b816c89d-8a9e-48c7-841a-dcb5ee7ab0df/cinder-api/0.log" Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.743318 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_b816c89d-8a9e-48c7-841a-dcb5ee7ab0df/cinder-api-log/0.log" Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.820506 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2727cbbe-cad4-47ff-b451-2f66b4f65bbf/cinder-scheduler/0.log" Dec 03 20:36:47 crc kubenswrapper[4916]: I1203 20:36:47.930774 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2727cbbe-cad4-47ff-b451-2f66b4f65bbf/probe/0.log" Dec 03 20:36:48 crc kubenswrapper[4916]: I1203 20:36:48.167453 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-v727v_8d641422-c093-42d7-bc60-6df1dd5b0796/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:48 crc kubenswrapper[4916]: I1203 20:36:48.206023 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz_61740a0d-2157-431d-a999-802aad6cb402/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:48 crc kubenswrapper[4916]: I1203 20:36:48.331658 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:48 crc kubenswrapper[4916]: I1203 20:36:48.331817 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:48 crc kubenswrapper[4916]: I1203 20:36:48.412236 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-6f6df4f56c-xfpvl_5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62/init/0.log" Dec 03 20:36:48 crc kubenswrapper[4916]: I1203 20:36:48.691099 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l_f236d742-b29b-42c2-90ac-70d01657b967/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:48 crc kubenswrapper[4916]: I1203 20:36:48.698429 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6f6df4f56c-xfpvl_5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62/dnsmasq-dns/0.log" Dec 03 20:36:48 crc kubenswrapper[4916]: I1203 20:36:48.786886 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6f6df4f56c-xfpvl_5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62/init/0.log" Dec 03 20:36:48 crc kubenswrapper[4916]: I1203 20:36:48.971209 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pqz5f"] Dec 03 20:36:48 crc kubenswrapper[4916]: I1203 20:36:48.974131 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:48 crc kubenswrapper[4916]: I1203 20:36:48.995676 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqz5f"] Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.028096 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_09b23ba0-7111-4c00-9ecc-a4ea541b3ca4/glance-log/0.log" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.035762 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_09b23ba0-7111-4c00-9ecc-a4ea541b3ca4/glance-httpd/0.log" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.140859 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-catalog-content\") pod \"redhat-marketplace-pqz5f\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.140952 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-utilities\") pod \"redhat-marketplace-pqz5f\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.140976 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6q8w\" (UniqueName: \"kubernetes.io/projected/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-kube-api-access-k6q8w\") pod \"redhat-marketplace-pqz5f\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.243117 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-catalog-content\") pod \"redhat-marketplace-pqz5f\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.243209 4916 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-utilities\") pod \"redhat-marketplace-pqz5f\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.243231 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6q8w\" (UniqueName: \"kubernetes.io/projected/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-kube-api-access-k6q8w\") pod \"redhat-marketplace-pqz5f\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.243785 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-catalog-content\") pod \"redhat-marketplace-pqz5f\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.243799 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-utilities\") pod \"redhat-marketplace-pqz5f\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.300532 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6q8w\" (UniqueName: \"kubernetes.io/projected/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-kube-api-access-k6q8w\") pod \"redhat-marketplace-pqz5f\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.338357 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.399522 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mrdfw" podUID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerName="registry-server" probeResult="failure" output=< Dec 03 20:36:49 crc kubenswrapper[4916]: timeout: failed to connect service ":50051" within 1s Dec 03 20:36:49 crc kubenswrapper[4916]: > Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.486700 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_65acea52-6e4e-44c7-9406-bc296db6821b/glance-httpd/0.log" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.618859 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_65acea52-6e4e-44c7-9406-bc296db6821b/glance-log/0.log" Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.687415 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lmc2" event={"ID":"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8","Type":"ContainerStarted","Data":"659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276"} Dec 03 20:36:49 crc kubenswrapper[4916]: I1203 20:36:49.712304 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9lmc2" podStartSLOduration=3.542239936 podStartE2EDuration="7.712283306s" podCreationTimestamp="2025-12-03 20:36:42 +0000 UTC" firstStartedPulling="2025-12-03 20:36:44.578373485 +0000 UTC m=+4020.541183741" lastFinishedPulling="2025-12-03 20:36:48.748416845 +0000 UTC m=+4024.711227111" observedRunningTime="2025-12-03 20:36:49.710984101 +0000 UTC m=+4025.673794367" watchObservedRunningTime="2025-12-03 20:36:49.712283306 +0000 UTC m=+4025.675093572" Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.005087 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqz5f"] Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.046511 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-56c49bcc9c-497gn_af5156d3-f2f0-4963-8561-5eac0b719c9a/heat-api/0.log" Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.119885 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-7b767dc896-5v8nl_e7ee41e1-65cb-4642-ae18-5f0a926d8c1d/heat-engine/0.log" Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.215433 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-84fcbd5864-k72dj_480f4ab0-3854-480f-9dd8-d44be1454e48/heat-cfnapi/0.log" Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.541374 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp_c94bfa68-5e27-47fe-a55e-b05abead70ac/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.699322 4916 generic.go:334] "Generic (PLEG): container finished" podID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" containerID="716115907c637d53559561044d25c4b48ee9459b4632943aa4acb5e63e2c5f84" exitCode=0 Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.699366 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqz5f" 
event={"ID":"3cfee2cd-6a80-47de-a938-86b7d5d40ccb","Type":"ContainerDied","Data":"716115907c637d53559561044d25c4b48ee9459b4632943aa4acb5e63e2c5f84"} Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.699389 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqz5f" event={"ID":"3cfee2cd-6a80-47de-a938-86b7d5d40ccb","Type":"ContainerStarted","Data":"c35c0f552d71af176fc15e67f62961cf2dd250cdeb76145d80465927d266c30f"} Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.775629 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-88vj9_6b66d006-a019-4921-9663-8fc348caf782/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.894994 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7c48bb485f-tqvlz_288ed5cf-795f-44fd-8ae8-ba522e48a62e/keystone-api/0.log" Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.983541 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_9d38924d-74b6-46db-9588-fa5c485fba69/kube-state-metrics/0.log" Dec 03 20:36:50 crc kubenswrapper[4916]: I1203 20:36:50.987143 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29413201-56f24_f303570c-cd6a-4249-9f85-dda22c04e2a7/keystone-cron/0.log" Dec 03 20:36:51 crc kubenswrapper[4916]: I1203 20:36:51.249252 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8_6c68c375-3a19-46dc-8d30-dd8f6edf361e/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:51 crc kubenswrapper[4916]: I1203 20:36:51.574024 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6b599f5db5-cs2bs_c924271a-a9cb-45cd-b1ab-3631a27c81aa/neutron-httpd/0.log" Dec 03 20:36:51 crc kubenswrapper[4916]: I1203 20:36:51.593535 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6b599f5db5-cs2bs_c924271a-a9cb-45cd-b1ab-3631a27c81aa/neutron-api/0.log" Dec 03 20:36:51 crc kubenswrapper[4916]: I1203 20:36:51.725739 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqz5f" event={"ID":"3cfee2cd-6a80-47de-a938-86b7d5d40ccb","Type":"ContainerStarted","Data":"36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959"} Dec 03 20:36:51 crc kubenswrapper[4916]: I1203 20:36:51.869693 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4_257d92ae-6326-4650-830c-b29ed36146e7/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:52 crc kubenswrapper[4916]: I1203 20:36:52.123375 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d9f3c72c-b924-4d5e-8c68-f62d5e83a870/nova-api-log/0.log" Dec 03 20:36:52 crc kubenswrapper[4916]: I1203 20:36:52.321003 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_a5a290e9-1938-4d33-a6b5-f7490d7a6bcc/nova-cell0-conductor-conductor/0.log" Dec 03 20:36:52 crc kubenswrapper[4916]: I1203 20:36:52.437862 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d9f3c72c-b924-4d5e-8c68-f62d5e83a870/nova-api-api/0.log" Dec 03 20:36:52 crc kubenswrapper[4916]: I1203 20:36:52.484260 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell1-conductor-0_22c68a9c-f222-4118-b636-311954e0d502/nova-cell1-conductor-conductor/0.log" Dec 03 20:36:52 crc kubenswrapper[4916]: I1203 20:36:52.635077 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_afa6fb7e-4053-4afb-89d5-2bce4d35c456/nova-cell1-novncproxy-novncproxy/0.log" Dec 03 20:36:52 crc kubenswrapper[4916]: I1203 20:36:52.734926 4916 generic.go:334] "Generic (PLEG): container finished" podID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" containerID="36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959" exitCode=0 Dec 03 20:36:52 crc kubenswrapper[4916]: I1203 20:36:52.734967 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqz5f" event={"ID":"3cfee2cd-6a80-47de-a938-86b7d5d40ccb","Type":"ContainerDied","Data":"36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959"} Dec 03 20:36:52 crc kubenswrapper[4916]: I1203 20:36:52.758553 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-zk7x7_8eaccb2f-783d-4da3-90ae-c88fdfef6c86/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:52 crc kubenswrapper[4916]: I1203 20:36:52.952923 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d3baf082-dd08-4c10-aac9-8ce2874aa2ae/nova-metadata-log/0.log" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.116874 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.116928 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.166812 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vdmtf"] Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.168959 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.183122 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vdmtf"] Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.205461 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_615a55a3-e9f9-4261-96a6-bcf865f0c183/nova-scheduler-scheduler/0.log" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.211585 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.253598 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-utilities\") pod \"certified-operators-vdmtf\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.253712 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g87ln\" (UniqueName: \"kubernetes.io/projected/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-kube-api-access-g87ln\") pod \"certified-operators-vdmtf\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.253802 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-catalog-content\") pod \"certified-operators-vdmtf\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.355263 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-utilities\") pod \"certified-operators-vdmtf\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.355415 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g87ln\" (UniqueName: \"kubernetes.io/projected/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-kube-api-access-g87ln\") pod \"certified-operators-vdmtf\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.355523 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-catalog-content\") pod \"certified-operators-vdmtf\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.355785 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-utilities\") pod \"certified-operators-vdmtf\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.357928 4916 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-catalog-content\") pod \"certified-operators-vdmtf\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.361797 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d8b94f14-6cc4-4c21-969c-e1aeb3c199fe/mysql-bootstrap/0.log" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.380737 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g87ln\" (UniqueName: \"kubernetes.io/projected/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-kube-api-access-g87ln\") pod \"certified-operators-vdmtf\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.517460 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.802601 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqz5f" event={"ID":"3cfee2cd-6a80-47de-a938-86b7d5d40ccb","Type":"ContainerStarted","Data":"04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974"} Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.831096 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pqz5f" podStartSLOduration=3.343318857 podStartE2EDuration="5.831082165s" podCreationTimestamp="2025-12-03 20:36:48 +0000 UTC" firstStartedPulling="2025-12-03 20:36:50.701385673 +0000 UTC m=+4026.664195939" lastFinishedPulling="2025-12-03 20:36:53.189148981 +0000 UTC m=+4029.151959247" observedRunningTime="2025-12-03 20:36:53.828985359 +0000 UTC m=+4029.791795625" watchObservedRunningTime="2025-12-03 20:36:53.831082165 +0000 UTC m=+4029.793892421" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.869498 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d8b94f14-6cc4-4c21-969c-e1aeb3c199fe/galera/0.log" Dec 03 20:36:53 crc kubenswrapper[4916]: I1203 20:36:53.945058 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d8b94f14-6cc4-4c21-969c-e1aeb3c199fe/mysql-bootstrap/0.log" Dec 03 20:36:54 crc kubenswrapper[4916]: I1203 20:36:54.056058 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vdmtf"] Dec 03 20:36:54 crc kubenswrapper[4916]: I1203 20:36:54.226965 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_85db28fe-52b4-4feb-8461-8c7a7e6e5179/mysql-bootstrap/0.log" Dec 03 20:36:54 crc kubenswrapper[4916]: I1203 20:36:54.440649 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_85db28fe-52b4-4feb-8461-8c7a7e6e5179/mysql-bootstrap/0.log" Dec 03 20:36:54 crc kubenswrapper[4916]: I1203 20:36:54.490658 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_85db28fe-52b4-4feb-8461-8c7a7e6e5179/galera/0.log" Dec 03 20:36:54 crc kubenswrapper[4916]: I1203 20:36:54.562195 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-metadata-0_d3baf082-dd08-4c10-aac9-8ce2874aa2ae/nova-metadata-metadata/0.log" Dec 03 20:36:54 crc kubenswrapper[4916]: I1203 20:36:54.679714 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_0e722523-b45d-4256-a08c-088a095f77f5/openstackclient/0.log" Dec 03 20:36:54 crc kubenswrapper[4916]: I1203 20:36:54.747439 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ndv46_c79e8746-0571-48ab-ad7d-94b92eadc07e/openstack-network-exporter/0.log" Dec 03 20:36:54 crc kubenswrapper[4916]: I1203 20:36:54.809992 4916 generic.go:334] "Generic (PLEG): container finished" podID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" containerID="0f6dc4eb36e2613b86dc96750421d23e546f2b430976884312c0452979416bfd" exitCode=0 Dec 03 20:36:54 crc kubenswrapper[4916]: I1203 20:36:54.811099 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vdmtf" event={"ID":"e49df94d-0f20-43eb-9ada-fcdf24a1ff88","Type":"ContainerDied","Data":"0f6dc4eb36e2613b86dc96750421d23e546f2b430976884312c0452979416bfd"} Dec 03 20:36:54 crc kubenswrapper[4916]: I1203 20:36:54.811140 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vdmtf" event={"ID":"e49df94d-0f20-43eb-9ada-fcdf24a1ff88","Type":"ContainerStarted","Data":"46ba058fefcff7322449297a938201da200300712e7bdaecf361d6bd4115ad7f"} Dec 03 20:36:54 crc kubenswrapper[4916]: I1203 20:36:54.880630 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7wkt5_4180ae6f-d0a4-4af0-b89c-48ab118b3f8c/ovsdb-server-init/0.log" Dec 03 20:36:55 crc kubenswrapper[4916]: I1203 20:36:55.209546 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7wkt5_4180ae6f-d0a4-4af0-b89c-48ab118b3f8c/ovsdb-server/0.log" Dec 03 20:36:55 crc kubenswrapper[4916]: I1203 20:36:55.217960 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7wkt5_4180ae6f-d0a4-4af0-b89c-48ab118b3f8c/ovs-vswitchd/0.log" Dec 03 20:36:55 crc kubenswrapper[4916]: I1203 20:36:55.297791 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7wkt5_4180ae6f-d0a4-4af0-b89c-48ab118b3f8c/ovsdb-server-init/0.log" Dec 03 20:36:55 crc kubenswrapper[4916]: I1203 20:36:55.377000 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-stq8b_7cb5f017-c41b-4af3-8455-e1ab42faa626/ovn-controller/0.log" Dec 03 20:36:55 crc kubenswrapper[4916]: I1203 20:36:55.540507 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qv9hr_49da2e3d-9d45-478a-b073-beb7a5ca51ae/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:55 crc kubenswrapper[4916]: I1203 20:36:55.745928 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_ec7f91f3-02c9-42a9-b415-aa58806d9b17/openstack-network-exporter/0.log" Dec 03 20:36:55 crc kubenswrapper[4916]: I1203 20:36:55.825295 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vdmtf" event={"ID":"e49df94d-0f20-43eb-9ada-fcdf24a1ff88","Type":"ContainerStarted","Data":"00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1"} Dec 03 20:36:55 crc kubenswrapper[4916]: I1203 20:36:55.919401 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-northd-0_ec7f91f3-02c9-42a9-b415-aa58806d9b17/ovn-northd/0.log" Dec 03 20:36:56 crc kubenswrapper[4916]: I1203 20:36:56.018490 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_87458b34-0f3f-430d-8c93-a3138854fc20/ovsdbserver-nb/0.log" Dec 03 20:36:56 crc kubenswrapper[4916]: I1203 20:36:56.088000 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_87458b34-0f3f-430d-8c93-a3138854fc20/openstack-network-exporter/0.log" Dec 03 20:36:56 crc kubenswrapper[4916]: I1203 20:36:56.320731 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_8e2bf00a-bdbe-4c59-b020-b1c3d96375f3/openstack-network-exporter/0.log" Dec 03 20:36:56 crc kubenswrapper[4916]: I1203 20:36:56.545596 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_8e2bf00a-bdbe-4c59-b020-b1c3d96375f3/ovsdbserver-sb/0.log" Dec 03 20:36:56 crc kubenswrapper[4916]: I1203 20:36:56.579860 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5747d5b464-dtdts_b6858994-e73c-4542-9cb1-5bb0213f35bf/placement-api/0.log" Dec 03 20:36:56 crc kubenswrapper[4916]: I1203 20:36:56.648134 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5747d5b464-dtdts_b6858994-e73c-4542-9cb1-5bb0213f35bf/placement-log/0.log" Dec 03 20:36:56 crc kubenswrapper[4916]: I1203 20:36:56.761559 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f14e71ef-0642-4eff-b6c8-08454ad04ccf/init-config-reloader/0.log" Dec 03 20:36:56 crc kubenswrapper[4916]: I1203 20:36:56.965529 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f14e71ef-0642-4eff-b6c8-08454ad04ccf/config-reloader/0.log" Dec 03 20:36:56 crc kubenswrapper[4916]: I1203 20:36:56.983488 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f14e71ef-0642-4eff-b6c8-08454ad04ccf/prometheus/0.log" Dec 03 20:36:57 crc kubenswrapper[4916]: I1203 20:36:57.015842 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f14e71ef-0642-4eff-b6c8-08454ad04ccf/thanos-sidecar/0.log" Dec 03 20:36:57 crc kubenswrapper[4916]: I1203 20:36:57.043423 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f14e71ef-0642-4eff-b6c8-08454ad04ccf/init-config-reloader/0.log" Dec 03 20:36:57 crc kubenswrapper[4916]: I1203 20:36:57.183788 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c3d2c7b8-c85e-4806-986b-55b486864e84/setup-container/0.log" Dec 03 20:36:57 crc kubenswrapper[4916]: I1203 20:36:57.388018 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c3d2c7b8-c85e-4806-986b-55b486864e84/setup-container/0.log" Dec 03 20:36:57 crc kubenswrapper[4916]: I1203 20:36:57.445026 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c3d2c7b8-c85e-4806-986b-55b486864e84/rabbitmq/0.log" Dec 03 20:36:57 crc kubenswrapper[4916]: I1203 20:36:57.492317 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a80b26ac-d55e-4513-9a8d-a70a0b197433/setup-container/0.log" Dec 03 20:36:57 crc kubenswrapper[4916]: I1203 20:36:57.758249 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_rabbitmq-server-0_a80b26ac-d55e-4513-9a8d-a70a0b197433/setup-container/0.log" Dec 03 20:36:57 crc kubenswrapper[4916]: I1203 20:36:57.786282 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-m572f_c7109f13-10df-437e-96da-34c0889a9231/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:57 crc kubenswrapper[4916]: I1203 20:36:57.797025 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a80b26ac-d55e-4513-9a8d-a70a0b197433/rabbitmq/0.log" Dec 03 20:36:57 crc kubenswrapper[4916]: I1203 20:36:57.843680 4916 generic.go:334] "Generic (PLEG): container finished" podID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" containerID="00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1" exitCode=0 Dec 03 20:36:57 crc kubenswrapper[4916]: I1203 20:36:57.843769 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vdmtf" event={"ID":"e49df94d-0f20-43eb-9ada-fcdf24a1ff88","Type":"ContainerDied","Data":"00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1"} Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.090525 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-bbf7x_af00bdd2-2610-40a8-b6d7-1252796d9341/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.121414 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd_d2eaefa8-6147-45c5-ae3e-77e0d47c2d11/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.311694 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-4pcnc_ace07bb7-8494-4a26-9737-33b0407dde91/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.394765 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.415354 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-qcp8f_d3c34a0f-3914-4307-9e37-317749a61c02/ssh-known-hosts-edpm-deployment/0.log" Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.445585 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.576039 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5fb66fd5df-mqd8w_7c9b70bb-7121-4484-9d1c-f928d26b6f3a/proxy-server/0.log" Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.728317 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5fb66fd5df-mqd8w_7c9b70bb-7121-4484-9d1c-f928d26b6f3a/proxy-httpd/0.log" Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.840960 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-wfwfd_955b2a04-73e1-4ab5-b322-e301684e8785/swift-ring-rebalance/0.log" Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.854512 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vdmtf" 
event={"ID":"e49df94d-0f20-43eb-9ada-fcdf24a1ff88","Type":"ContainerStarted","Data":"c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582"} Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.872607 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vdmtf" podStartSLOduration=2.427813082 podStartE2EDuration="5.87259336s" podCreationTimestamp="2025-12-03 20:36:53 +0000 UTC" firstStartedPulling="2025-12-03 20:36:54.812405037 +0000 UTC m=+4030.775215303" lastFinishedPulling="2025-12-03 20:36:58.257185315 +0000 UTC m=+4034.219995581" observedRunningTime="2025-12-03 20:36:58.869874499 +0000 UTC m=+4034.832684765" watchObservedRunningTime="2025-12-03 20:36:58.87259336 +0000 UTC m=+4034.835403626" Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.943254 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/account-auditor/0.log" Dec 03 20:36:58 crc kubenswrapper[4916]: I1203 20:36:58.996901 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/account-reaper/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.098216 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/account-server/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.106906 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/account-replicator/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.208990 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/container-auditor/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.287840 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/container-replicator/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.338485 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.338623 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.390198 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.419305 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/container-updater/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.423959 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/container-server/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.436050 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/object-auditor/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.600872 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/object-expirer/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.634610 
4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/object-updater/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.665131 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/object-server/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.682359 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/object-replicator/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.839247 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/rsync/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.875075 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/swift-recon-cron/0.log" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.907106 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:36:59 crc kubenswrapper[4916]: I1203 20:36:59.981476 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p_531e7486-e849-4176-b8d7-b93e11082c0a/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:37:00 crc kubenswrapper[4916]: I1203 20:37:00.073958 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h_d64868ee-2aa9-48b3-bfd7-895a9daf8c5a/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:37:00 crc kubenswrapper[4916]: I1203 20:37:00.355364 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqz5f"] Dec 03 20:37:01 crc kubenswrapper[4916]: I1203 20:37:01.878101 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pqz5f" podUID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" containerName="registry-server" containerID="cri-o://04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974" gracePeriod=2 Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.506724 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.626438 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-catalog-content\") pod \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.626587 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6q8w\" (UniqueName: \"kubernetes.io/projected/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-kube-api-access-k6q8w\") pod \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.626742 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-utilities\") pod \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\" (UID: \"3cfee2cd-6a80-47de-a938-86b7d5d40ccb\") " Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.627460 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-utilities" (OuterVolumeSpecName: "utilities") pod "3cfee2cd-6a80-47de-a938-86b7d5d40ccb" (UID: "3cfee2cd-6a80-47de-a938-86b7d5d40ccb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.647207 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-kube-api-access-k6q8w" (OuterVolumeSpecName: "kube-api-access-k6q8w") pod "3cfee2cd-6a80-47de-a938-86b7d5d40ccb" (UID: "3cfee2cd-6a80-47de-a938-86b7d5d40ccb"). InnerVolumeSpecName "kube-api-access-k6q8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.648076 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3cfee2cd-6a80-47de-a938-86b7d5d40ccb" (UID: "3cfee2cd-6a80-47de-a938-86b7d5d40ccb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.728305 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.728335 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.728347 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6q8w\" (UniqueName: \"kubernetes.io/projected/3cfee2cd-6a80-47de-a938-86b7d5d40ccb-kube-api-access-k6q8w\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.750106 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mrdfw"] Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.750319 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mrdfw" podUID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerName="registry-server" containerID="cri-o://cf9a53fe33028e8edf2f844b49b12a71bb8a2fe28beaf6a35b9df0ff725c4ec0" gracePeriod=2 Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.890045 4916 generic.go:334] "Generic (PLEG): container finished" podID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerID="cf9a53fe33028e8edf2f844b49b12a71bb8a2fe28beaf6a35b9df0ff725c4ec0" exitCode=0 Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.890386 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrdfw" event={"ID":"5352f1bc-6a00-45f6-9917-1bbedf6caad8","Type":"ContainerDied","Data":"cf9a53fe33028e8edf2f844b49b12a71bb8a2fe28beaf6a35b9df0ff725c4ec0"} Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.893924 4916 generic.go:334] "Generic (PLEG): container finished" podID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" containerID="04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974" exitCode=0 Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.893952 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqz5f" event={"ID":"3cfee2cd-6a80-47de-a938-86b7d5d40ccb","Type":"ContainerDied","Data":"04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974"} Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.893978 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqz5f" event={"ID":"3cfee2cd-6a80-47de-a938-86b7d5d40ccb","Type":"ContainerDied","Data":"c35c0f552d71af176fc15e67f62961cf2dd250cdeb76145d80465927d266c30f"} Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.894016 4916 scope.go:117] "RemoveContainer" containerID="04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974" Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.894164 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqz5f" Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.921750 4916 scope.go:117] "RemoveContainer" containerID="36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959" Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.946045 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqz5f"] Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.955000 4916 scope.go:117] "RemoveContainer" containerID="716115907c637d53559561044d25c4b48ee9459b4632943aa4acb5e63e2c5f84" Dec 03 20:37:02 crc kubenswrapper[4916]: I1203 20:37:02.965495 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqz5f"] Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.007562 4916 scope.go:117] "RemoveContainer" containerID="04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974" Dec 03 20:37:03 crc kubenswrapper[4916]: E1203 20:37:03.018980 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974\": container with ID starting with 04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974 not found: ID does not exist" containerID="04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.019018 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974"} err="failed to get container status \"04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974\": rpc error: code = NotFound desc = could not find container \"04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974\": container with ID starting with 04f72d3077e96a7bd9fa92ad73ac73dbdf8a5cb998291457bd2ccab2ea872974 not found: ID does not exist" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.019041 4916 scope.go:117] "RemoveContainer" containerID="36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959" Dec 03 20:37:03 crc kubenswrapper[4916]: E1203 20:37:03.019339 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959\": container with ID starting with 36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959 not found: ID does not exist" containerID="36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.019362 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959"} err="failed to get container status \"36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959\": rpc error: code = NotFound desc = could not find container \"36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959\": container with ID starting with 36b0eec787864069f9272d871336e1b66e630a39dd8d1a58b981ff3d8f145959 not found: ID does not exist" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.019375 4916 scope.go:117] "RemoveContainer" containerID="716115907c637d53559561044d25c4b48ee9459b4632943aa4acb5e63e2c5f84" Dec 03 20:37:03 crc kubenswrapper[4916]: E1203 20:37:03.019791 4916 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"716115907c637d53559561044d25c4b48ee9459b4632943aa4acb5e63e2c5f84\": container with ID starting with 716115907c637d53559561044d25c4b48ee9459b4632943aa4acb5e63e2c5f84 not found: ID does not exist" containerID="716115907c637d53559561044d25c4b48ee9459b4632943aa4acb5e63e2c5f84" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.019811 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"716115907c637d53559561044d25c4b48ee9459b4632943aa4acb5e63e2c5f84"} err="failed to get container status \"716115907c637d53559561044d25c4b48ee9459b4632943aa4acb5e63e2c5f84\": rpc error: code = NotFound desc = could not find container \"716115907c637d53559561044d25c4b48ee9459b4632943aa4acb5e63e2c5f84\": container with ID starting with 716115907c637d53559561044d25c4b48ee9459b4632943aa4acb5e63e2c5f84 not found: ID does not exist" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.178794 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.234887 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.340245 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-catalog-content\") pod \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.340286 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-utilities\") pod \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.340363 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rm6gh\" (UniqueName: \"kubernetes.io/projected/5352f1bc-6a00-45f6-9917-1bbedf6caad8-kube-api-access-rm6gh\") pod \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\" (UID: \"5352f1bc-6a00-45f6-9917-1bbedf6caad8\") " Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.341637 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-utilities" (OuterVolumeSpecName: "utilities") pod "5352f1bc-6a00-45f6-9917-1bbedf6caad8" (UID: "5352f1bc-6a00-45f6-9917-1bbedf6caad8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.355636 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5352f1bc-6a00-45f6-9917-1bbedf6caad8-kube-api-access-rm6gh" (OuterVolumeSpecName: "kube-api-access-rm6gh") pod "5352f1bc-6a00-45f6-9917-1bbedf6caad8" (UID: "5352f1bc-6a00-45f6-9917-1bbedf6caad8"). InnerVolumeSpecName "kube-api-access-rm6gh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.439317 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5352f1bc-6a00-45f6-9917-1bbedf6caad8" (UID: "5352f1bc-6a00-45f6-9917-1bbedf6caad8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.442194 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.442226 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5352f1bc-6a00-45f6-9917-1bbedf6caad8-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.442236 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rm6gh\" (UniqueName: \"kubernetes.io/projected/5352f1bc-6a00-45f6-9917-1bbedf6caad8-kube-api-access-rm6gh\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.518194 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.518411 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.562939 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.904519 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mrdfw" event={"ID":"5352f1bc-6a00-45f6-9917-1bbedf6caad8","Type":"ContainerDied","Data":"58a3bf93f7b9064090b8c915cac5f84d0701430bf750d7e6e1df0d21d4b80166"} Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.904607 4916 scope.go:117] "RemoveContainer" containerID="cf9a53fe33028e8edf2f844b49b12a71bb8a2fe28beaf6a35b9df0ff725c4ec0" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.904620 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mrdfw" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.936386 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mrdfw"] Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.939078 4916 scope.go:117] "RemoveContainer" containerID="cb0706dcba030a28779aff63b6c88d9e8b69246d1427b609ddaaa8980ada5bd8" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.944629 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mrdfw"] Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.975457 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:37:03 crc kubenswrapper[4916]: I1203 20:37:03.980542 4916 scope.go:117] "RemoveContainer" containerID="4998c11dad2df2a515d7430e8c24b2921f0cbe600e4798941b9136978d421bfe" Dec 03 20:37:04 crc kubenswrapper[4916]: I1203 20:37:04.522789 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" path="/var/lib/kubelet/pods/3cfee2cd-6a80-47de-a938-86b7d5d40ccb/volumes" Dec 03 20:37:04 crc kubenswrapper[4916]: I1203 20:37:04.523990 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" path="/var/lib/kubelet/pods/5352f1bc-6a00-45f6-9917-1bbedf6caad8/volumes" Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.150105 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9lmc2"] Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.150608 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9lmc2" podUID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" containerName="registry-server" containerID="cri-o://659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276" gracePeriod=2 Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.553663 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.700973 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gkcd\" (UniqueName: \"kubernetes.io/projected/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-kube-api-access-9gkcd\") pod \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.701230 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-utilities\") pod \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.701347 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-catalog-content\") pod \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\" (UID: \"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8\") " Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.709089 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-kube-api-access-9gkcd" (OuterVolumeSpecName: "kube-api-access-9gkcd") pod "a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" (UID: "a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8"). InnerVolumeSpecName "kube-api-access-9gkcd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.712702 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-utilities" (OuterVolumeSpecName: "utilities") pod "a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" (UID: "a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.754746 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" (UID: "a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.802964 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.802989 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gkcd\" (UniqueName: \"kubernetes.io/projected/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-kube-api-access-9gkcd\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.803000 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.934980 4916 generic.go:334] "Generic (PLEG): container finished" podID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" containerID="659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276" exitCode=0 Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.935016 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lmc2" event={"ID":"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8","Type":"ContainerDied","Data":"659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276"} Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.935043 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9lmc2" event={"ID":"a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8","Type":"ContainerDied","Data":"b4f822f9e29687352e3c85fe49aa0459170937e5c24ab77d8ae3eba935e30c9e"} Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.935060 4916 scope.go:117] "RemoveContainer" containerID="659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276" Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.935070 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9lmc2" Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.968469 4916 scope.go:117] "RemoveContainer" containerID="27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce" Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.974626 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9lmc2"] Dec 03 20:37:06 crc kubenswrapper[4916]: I1203 20:37:06.996958 4916 scope.go:117] "RemoveContainer" containerID="76286afb030e40f99d26fd519287aa74bb23ba05ebfa165023d7b4eab6f7580a" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.034142 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9lmc2"] Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.041309 4916 scope.go:117] "RemoveContainer" containerID="659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276" Dec 03 20:37:07 crc kubenswrapper[4916]: E1203 20:37:07.043293 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276\": container with ID starting with 659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276 not found: ID does not exist" containerID="659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.043344 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276"} err="failed to get container status \"659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276\": rpc error: code = NotFound desc = could not find container \"659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276\": container with ID starting with 659924c2446a326c78fb8dcbcdd01b28661266e1281562fcc36873a2a171d276 not found: ID does not exist" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.043371 4916 scope.go:117] "RemoveContainer" containerID="27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce" Dec 03 20:37:07 crc kubenswrapper[4916]: E1203 20:37:07.043710 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce\": container with ID starting with 27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce not found: ID does not exist" containerID="27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.043748 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce"} err="failed to get container status \"27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce\": rpc error: code = NotFound desc = could not find container \"27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce\": container with ID starting with 27a5674dc2d2f408c2573d17a9e72e6e1ac7907684cc100223773fd5d857f1ce not found: ID does not exist" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.043775 4916 scope.go:117] "RemoveContainer" containerID="76286afb030e40f99d26fd519287aa74bb23ba05ebfa165023d7b4eab6f7580a" Dec 03 20:37:07 crc kubenswrapper[4916]: E1203 20:37:07.044147 4916 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"76286afb030e40f99d26fd519287aa74bb23ba05ebfa165023d7b4eab6f7580a\": container with ID starting with 76286afb030e40f99d26fd519287aa74bb23ba05ebfa165023d7b4eab6f7580a not found: ID does not exist" containerID="76286afb030e40f99d26fd519287aa74bb23ba05ebfa165023d7b4eab6f7580a" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.044186 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76286afb030e40f99d26fd519287aa74bb23ba05ebfa165023d7b4eab6f7580a"} err="failed to get container status \"76286afb030e40f99d26fd519287aa74bb23ba05ebfa165023d7b4eab6f7580a\": rpc error: code = NotFound desc = could not find container \"76286afb030e40f99d26fd519287aa74bb23ba05ebfa165023d7b4eab6f7580a\": container with ID starting with 76286afb030e40f99d26fd519287aa74bb23ba05ebfa165023d7b4eab6f7580a not found: ID does not exist" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.150038 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vdmtf"] Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.150548 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vdmtf" podUID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" containerName="registry-server" containerID="cri-o://c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582" gracePeriod=2 Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.216964 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_431d6c6e-0ec4-4eae-8bee-4fdce5e2328d/memcached/0.log" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.628288 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.719420 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-catalog-content\") pod \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.719594 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g87ln\" (UniqueName: \"kubernetes.io/projected/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-kube-api-access-g87ln\") pod \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.719634 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-utilities\") pod \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\" (UID: \"e49df94d-0f20-43eb-9ada-fcdf24a1ff88\") " Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.720446 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-utilities" (OuterVolumeSpecName: "utilities") pod "e49df94d-0f20-43eb-9ada-fcdf24a1ff88" (UID: "e49df94d-0f20-43eb-9ada-fcdf24a1ff88"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.724542 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-kube-api-access-g87ln" (OuterVolumeSpecName: "kube-api-access-g87ln") pod "e49df94d-0f20-43eb-9ada-fcdf24a1ff88" (UID: "e49df94d-0f20-43eb-9ada-fcdf24a1ff88"). InnerVolumeSpecName "kube-api-access-g87ln". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.764195 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e49df94d-0f20-43eb-9ada-fcdf24a1ff88" (UID: "e49df94d-0f20-43eb-9ada-fcdf24a1ff88"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.823471 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g87ln\" (UniqueName: \"kubernetes.io/projected/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-kube-api-access-g87ln\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.823504 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.823514 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e49df94d-0f20-43eb-9ada-fcdf24a1ff88-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.951393 4916 generic.go:334] "Generic (PLEG): container finished" podID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" containerID="c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582" exitCode=0 Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.951484 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vdmtf" event={"ID":"e49df94d-0f20-43eb-9ada-fcdf24a1ff88","Type":"ContainerDied","Data":"c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582"} Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.951525 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vdmtf" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.951540 4916 scope.go:117] "RemoveContainer" containerID="c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582" Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.951529 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vdmtf" event={"ID":"e49df94d-0f20-43eb-9ada-fcdf24a1ff88","Type":"ContainerDied","Data":"46ba058fefcff7322449297a938201da200300712e7bdaecf361d6bd4115ad7f"} Dec 03 20:37:07 crc kubenswrapper[4916]: I1203 20:37:07.976700 4916 scope.go:117] "RemoveContainer" containerID="00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1" Dec 03 20:37:08 crc kubenswrapper[4916]: I1203 20:37:08.003545 4916 scope.go:117] "RemoveContainer" containerID="0f6dc4eb36e2613b86dc96750421d23e546f2b430976884312c0452979416bfd" Dec 03 20:37:08 crc kubenswrapper[4916]: I1203 20:37:08.019257 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vdmtf"] Dec 03 20:37:08 crc kubenswrapper[4916]: I1203 20:37:08.030959 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vdmtf"] Dec 03 20:37:08 crc kubenswrapper[4916]: I1203 20:37:08.054779 4916 scope.go:117] "RemoveContainer" containerID="c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582" Dec 03 20:37:08 crc kubenswrapper[4916]: E1203 20:37:08.055178 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582\": container with ID starting with c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582 not found: ID does not exist" containerID="c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582" Dec 03 20:37:08 crc kubenswrapper[4916]: I1203 20:37:08.055215 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582"} err="failed to get container status \"c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582\": rpc error: code = NotFound desc = could not find container \"c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582\": container with ID starting with c633937937b3caf4a56e9b28b9779b386b920d3189c878dadf6f43023d0cf582 not found: ID does not exist" Dec 03 20:37:08 crc kubenswrapper[4916]: I1203 20:37:08.055239 4916 scope.go:117] "RemoveContainer" containerID="00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1" Dec 03 20:37:08 crc kubenswrapper[4916]: E1203 20:37:08.055481 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1\": container with ID starting with 00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1 not found: ID does not exist" containerID="00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1" Dec 03 20:37:08 crc kubenswrapper[4916]: I1203 20:37:08.055506 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1"} err="failed to get container status \"00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1\": rpc error: code = NotFound desc = could not find 
container \"00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1\": container with ID starting with 00c101fc35fbd19f7c1408fbc9ed13673bbe06d9568935cb3746b3c9d336a7f1 not found: ID does not exist" Dec 03 20:37:08 crc kubenswrapper[4916]: I1203 20:37:08.055523 4916 scope.go:117] "RemoveContainer" containerID="0f6dc4eb36e2613b86dc96750421d23e546f2b430976884312c0452979416bfd" Dec 03 20:37:08 crc kubenswrapper[4916]: E1203 20:37:08.055783 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f6dc4eb36e2613b86dc96750421d23e546f2b430976884312c0452979416bfd\": container with ID starting with 0f6dc4eb36e2613b86dc96750421d23e546f2b430976884312c0452979416bfd not found: ID does not exist" containerID="0f6dc4eb36e2613b86dc96750421d23e546f2b430976884312c0452979416bfd" Dec 03 20:37:08 crc kubenswrapper[4916]: I1203 20:37:08.055831 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f6dc4eb36e2613b86dc96750421d23e546f2b430976884312c0452979416bfd"} err="failed to get container status \"0f6dc4eb36e2613b86dc96750421d23e546f2b430976884312c0452979416bfd\": rpc error: code = NotFound desc = could not find container \"0f6dc4eb36e2613b86dc96750421d23e546f2b430976884312c0452979416bfd\": container with ID starting with 0f6dc4eb36e2613b86dc96750421d23e546f2b430976884312c0452979416bfd not found: ID does not exist" Dec 03 20:37:08 crc kubenswrapper[4916]: I1203 20:37:08.489386 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" path="/var/lib/kubelet/pods/a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8/volumes" Dec 03 20:37:08 crc kubenswrapper[4916]: I1203 20:37:08.490350 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" path="/var/lib/kubelet/pods/e49df94d-0f20-43eb-9ada-fcdf24a1ff88/volumes" Dec 03 20:37:16 crc kubenswrapper[4916]: I1203 20:37:16.158749 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:37:16 crc kubenswrapper[4916]: I1203 20:37:16.159230 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:37:26 crc kubenswrapper[4916]: I1203 20:37:26.345918 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/util/0.log" Dec 03 20:37:26 crc kubenswrapper[4916]: I1203 20:37:26.483700 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/pull/0.log" Dec 03 20:37:26 crc kubenswrapper[4916]: I1203 20:37:26.497115 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/util/0.log" Dec 03 20:37:26 crc kubenswrapper[4916]: I1203 20:37:26.510949 4916 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/pull/0.log" Dec 03 20:37:26 crc kubenswrapper[4916]: I1203 20:37:26.684224 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/util/0.log" Dec 03 20:37:26 crc kubenswrapper[4916]: I1203 20:37:26.690373 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/pull/0.log" Dec 03 20:37:26 crc kubenswrapper[4916]: I1203 20:37:26.702711 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/extract/0.log" Dec 03 20:37:26 crc kubenswrapper[4916]: I1203 20:37:26.829668 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-xn476_d2c7e9c7-96cd-47bd-978a-c3fd41c74089/kube-rbac-proxy/0.log" Dec 03 20:37:26 crc kubenswrapper[4916]: I1203 20:37:26.943953 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-xn476_d2c7e9c7-96cd-47bd-978a-c3fd41c74089/manager/0.log" Dec 03 20:37:26 crc kubenswrapper[4916]: I1203 20:37:26.961887 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-6pzj5_232cd6fc-5f1d-4398-ae7d-5c34f49843f3/kube-rbac-proxy/0.log" Dec 03 20:37:27 crc kubenswrapper[4916]: I1203 20:37:27.081792 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-6pzj5_232cd6fc-5f1d-4398-ae7d-5c34f49843f3/manager/0.log" Dec 03 20:37:27 crc kubenswrapper[4916]: I1203 20:37:27.143637 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-nsfm2_cf093783-d31b-42fc-a85f-fff6c35fdae8/kube-rbac-proxy/0.log" Dec 03 20:37:27 crc kubenswrapper[4916]: I1203 20:37:27.156866 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-nsfm2_cf093783-d31b-42fc-a85f-fff6c35fdae8/manager/0.log" Dec 03 20:37:27 crc kubenswrapper[4916]: I1203 20:37:27.296816 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-x6tb6_a83c7d54-9430-456f-b83e-abed5d9030b8/kube-rbac-proxy/0.log" Dec 03 20:37:27 crc kubenswrapper[4916]: I1203 20:37:27.403369 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-x6tb6_a83c7d54-9430-456f-b83e-abed5d9030b8/manager/0.log" Dec 03 20:37:27 crc kubenswrapper[4916]: I1203 20:37:27.489771 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-csdvv_b63d5bf7-8901-4bce-90d9-0006ae946230/kube-rbac-proxy/0.log" Dec 03 20:37:27 crc kubenswrapper[4916]: I1203 20:37:27.520200 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-csdvv_b63d5bf7-8901-4bce-90d9-0006ae946230/manager/0.log" Dec 03 20:37:27 crc 
kubenswrapper[4916]: I1203 20:37:27.624058 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-kndn4_31355b8f-c9a1-4ddf-a97f-de6d4f506a67/kube-rbac-proxy/0.log" Dec 03 20:37:27 crc kubenswrapper[4916]: I1203 20:37:27.654969 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-kndn4_31355b8f-c9a1-4ddf-a97f-de6d4f506a67/manager/0.log" Dec 03 20:37:27 crc kubenswrapper[4916]: I1203 20:37:27.746810 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-dth7w_a571d18b-686d-472d-9086-e192ec504db4/kube-rbac-proxy/0.log" Dec 03 20:37:27 crc kubenswrapper[4916]: I1203 20:37:27.878405 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-jltdg_907117b8-0a09-440c-bb47-bfa09ccec80b/kube-rbac-proxy/0.log" Dec 03 20:37:27 crc kubenswrapper[4916]: I1203 20:37:27.965111 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-jltdg_907117b8-0a09-440c-bb47-bfa09ccec80b/manager/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.025181 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-dth7w_a571d18b-686d-472d-9086-e192ec504db4/manager/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.160670 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-6x48f_877f6f89-deed-4f06-adb1-cfa6b5254db2/kube-rbac-proxy/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.224547 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-6x48f_877f6f89-deed-4f06-adb1-cfa6b5254db2/manager/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.283125 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-sd86t_7303e0f2-e41d-4220-a72b-88e6b44b016c/kube-rbac-proxy/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.353728 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-sd86t_7303e0f2-e41d-4220-a72b-88e6b44b016c/manager/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.388654 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-7q6dq_dc81071a-3da1-4e63-b733-13e39ecfb823/kube-rbac-proxy/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.468901 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-7q6dq_dc81071a-3da1-4e63-b733-13e39ecfb823/manager/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.578782 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-mllvg_251c39c1-e63c-4772-a0e6-88528867a64d/kube-rbac-proxy/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.652819 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-mllvg_251c39c1-e63c-4772-a0e6-88528867a64d/manager/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.762873 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-29mr4_462898ce-79ab-4cd6-b05e-e19b65c80fa1/kube-rbac-proxy/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.818101 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-29mr4_462898ce-79ab-4cd6-b05e-e19b65c80fa1/manager/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.910527 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-wp7lm_fc0e679d-4033-4479-ba7a-cdc160e0b6ad/kube-rbac-proxy/0.log" Dec 03 20:37:28 crc kubenswrapper[4916]: I1203 20:37:28.948682 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-wp7lm_fc0e679d-4033-4479-ba7a-cdc160e0b6ad/manager/0.log" Dec 03 20:37:29 crc kubenswrapper[4916]: I1203 20:37:29.066970 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs_f356aae7-fed8-4f1b-a863-d7b47bcda904/kube-rbac-proxy/0.log" Dec 03 20:37:29 crc kubenswrapper[4916]: I1203 20:37:29.087269 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs_f356aae7-fed8-4f1b-a863-d7b47bcda904/manager/0.log" Dec 03 20:37:29 crc kubenswrapper[4916]: I1203 20:37:29.464667 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-n7msw_005f663b-9f61-44c6-8958-bbb7311df12e/registry-server/0.log" Dec 03 20:37:29 crc kubenswrapper[4916]: I1203 20:37:29.496008 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-8547fd68fd-vc7gd_e623d74a-1d16-43ec-a04f-a1817a8a8294/operator/0.log" Dec 03 20:37:29 crc kubenswrapper[4916]: I1203 20:37:29.728927 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-cjms4_c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae/kube-rbac-proxy/0.log" Dec 03 20:37:29 crc kubenswrapper[4916]: I1203 20:37:29.771085 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-cjms4_c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae/manager/0.log" Dec 03 20:37:29 crc kubenswrapper[4916]: I1203 20:37:29.844209 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-xvp6h_8829329b-8de5-4a0d-bd48-9cb7338c2dd1/kube-rbac-proxy/0.log" Dec 03 20:37:29 crc kubenswrapper[4916]: I1203 20:37:29.953522 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-xvp6h_8829329b-8de5-4a0d-bd48-9cb7338c2dd1/manager/0.log" Dec 03 20:37:30 crc kubenswrapper[4916]: I1203 20:37:30.072030 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-m5s6k_1a7e3254-35ac-48fa-8ab7-11e85c780369/operator/0.log" Dec 03 20:37:30 crc kubenswrapper[4916]: I1203 20:37:30.206786 4916 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-7bs98_04ce9fc2-2134-4d10-b3d8-764bca295eed/kube-rbac-proxy/0.log" Dec 03 20:37:30 crc kubenswrapper[4916]: I1203 20:37:30.340294 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_6f4635b6-2410-4d5f-a7c9-3cf0a04739f7/kube-rbac-proxy/0.log" Dec 03 20:37:30 crc kubenswrapper[4916]: I1203 20:37:30.367437 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-7bs98_04ce9fc2-2134-4d10-b3d8-764bca295eed/manager/0.log" Dec 03 20:37:30 crc kubenswrapper[4916]: I1203 20:37:30.564132 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-2txxc_a5bc0003-390d-477e-8b21-f7fda61cb051/kube-rbac-proxy/0.log" Dec 03 20:37:30 crc kubenswrapper[4916]: I1203 20:37:30.579960 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-2txxc_a5bc0003-390d-477e-8b21-f7fda61cb051/manager/0.log" Dec 03 20:37:30 crc kubenswrapper[4916]: I1203 20:37:30.651883 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_6f4635b6-2410-4d5f-a7c9-3cf0a04739f7/manager/0.log" Dec 03 20:37:30 crc kubenswrapper[4916]: I1203 20:37:30.760294 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-7vz8k_463726a8-9ad4-486b-b5b8-166fed3a6190/kube-rbac-proxy/0.log" Dec 03 20:37:30 crc kubenswrapper[4916]: I1203 20:37:30.807419 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-7vz8k_463726a8-9ad4-486b-b5b8-166fed3a6190/manager/0.log" Dec 03 20:37:30 crc kubenswrapper[4916]: I1203 20:37:30.834278 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-d8ff785c7-jbr4p_e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2/manager/0.log" Dec 03 20:37:46 crc kubenswrapper[4916]: I1203 20:37:46.158783 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:37:46 crc kubenswrapper[4916]: I1203 20:37:46.159298 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:37:51 crc kubenswrapper[4916]: I1203 20:37:51.388707 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-7rwpc_75435454-50d3-4560-821e-a5b8c171652b/control-plane-machine-set-operator/0.log" Dec 03 20:37:51 crc kubenswrapper[4916]: I1203 20:37:51.573901 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6ngfp_dbed5156-bd14-449e-943a-488606ac49e2/kube-rbac-proxy/0.log" Dec 03 20:37:51 crc kubenswrapper[4916]: I1203 20:37:51.599185 4916 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6ngfp_dbed5156-bd14-449e-943a-488606ac49e2/machine-api-operator/0.log" Dec 03 20:38:07 crc kubenswrapper[4916]: I1203 20:38:07.872213 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-x7n5p_2f8dae92-e705-48ac-ae95-fb463698c6a7/cert-manager-controller/0.log" Dec 03 20:38:07 crc kubenswrapper[4916]: I1203 20:38:07.992492 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-pz52m_6e2a44e6-d6b3-4b89-a243-7a9f66b36e18/cert-manager-cainjector/0.log" Dec 03 20:38:08 crc kubenswrapper[4916]: I1203 20:38:08.073739 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-gvbbc_8ac2850b-bf3b-4c3c-a1d1-8e59ce302246/cert-manager-webhook/0.log" Dec 03 20:38:16 crc kubenswrapper[4916]: I1203 20:38:16.158832 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:38:16 crc kubenswrapper[4916]: I1203 20:38:16.159363 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:38:16 crc kubenswrapper[4916]: I1203 20:38:16.159415 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 20:38:16 crc kubenswrapper[4916]: I1203 20:38:16.160299 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"280086a6a26131e2928ec85e46e947327e0eccc4ecf2b5480e980dd302192d99"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 20:38:16 crc kubenswrapper[4916]: I1203 20:38:16.160369 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://280086a6a26131e2928ec85e46e947327e0eccc4ecf2b5480e980dd302192d99" gracePeriod=600 Dec 03 20:38:16 crc kubenswrapper[4916]: I1203 20:38:16.587479 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="280086a6a26131e2928ec85e46e947327e0eccc4ecf2b5480e980dd302192d99" exitCode=0 Dec 03 20:38:16 crc kubenswrapper[4916]: I1203 20:38:16.587521 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"280086a6a26131e2928ec85e46e947327e0eccc4ecf2b5480e980dd302192d99"} Dec 03 20:38:16 crc kubenswrapper[4916]: I1203 20:38:16.587554 4916 scope.go:117] "RemoveContainer" containerID="536cd6626f117abf7328a8f88f7610b4ae6a291c93c64c8b47ed5874f8dfb812" Dec 03 20:38:17 crc kubenswrapper[4916]: I1203 20:38:17.611876 4916 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247"} Dec 03 20:38:22 crc kubenswrapper[4916]: I1203 20:38:22.391018 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-gbx7d_9ce1f525-9698-4837-8ad5-990c187fd275/nmstate-console-plugin/0.log" Dec 03 20:38:22 crc kubenswrapper[4916]: I1203 20:38:22.609192 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-gnlxq_7603911b-7915-49df-afc6-e80da6dd90f2/nmstate-handler/0.log" Dec 03 20:38:22 crc kubenswrapper[4916]: I1203 20:38:22.664015 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-nq5wn_a64200e6-4761-4d12-b787-7e0260253ffd/kube-rbac-proxy/0.log" Dec 03 20:38:22 crc kubenswrapper[4916]: I1203 20:38:22.675119 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-nq5wn_a64200e6-4761-4d12-b787-7e0260253ffd/nmstate-metrics/0.log" Dec 03 20:38:22 crc kubenswrapper[4916]: I1203 20:38:22.833317 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-6jmkr_697f3c53-6482-4054-8f02-fe024ba5f514/nmstate-operator/0.log" Dec 03 20:38:22 crc kubenswrapper[4916]: I1203 20:38:22.842904 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-tqmwt_1710047a-61c2-485d-ad6c-05691f102e43/nmstate-webhook/0.log" Dec 03 20:38:39 crc kubenswrapper[4916]: I1203 20:38:39.797903 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-zcqns_ca08d810-a5ec-4683-b666-4460bbaed1a1/kube-rbac-proxy/0.log" Dec 03 20:38:39 crc kubenswrapper[4916]: I1203 20:38:39.875373 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-zcqns_ca08d810-a5ec-4683-b666-4460bbaed1a1/controller/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.005762 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-frr-files/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.175107 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-reloader/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.177493 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-frr-files/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.213458 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-reloader/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.214792 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-metrics/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.372546 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-frr-files/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.413411 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-metrics/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.447082 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-reloader/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.473081 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-metrics/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.581554 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-frr-files/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.617165 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-metrics/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.626096 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-reloader/0.log" Dec 03 20:38:40 crc kubenswrapper[4916]: I1203 20:38:40.666465 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/controller/0.log" Dec 03 20:38:41 crc kubenswrapper[4916]: I1203 20:38:41.606264 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/frr-metrics/0.log" Dec 03 20:38:41 crc kubenswrapper[4916]: I1203 20:38:41.668264 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/kube-rbac-proxy-frr/0.log" Dec 03 20:38:41 crc kubenswrapper[4916]: I1203 20:38:41.685016 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/kube-rbac-proxy/0.log" Dec 03 20:38:41 crc kubenswrapper[4916]: I1203 20:38:41.845382 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/reloader/0.log" Dec 03 20:38:41 crc kubenswrapper[4916]: I1203 20:38:41.894407 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-42zxv_92d25015-4495-4c5a-a65d-e8027a8a1a00/frr-k8s-webhook-server/0.log" Dec 03 20:38:42 crc kubenswrapper[4916]: I1203 20:38:42.164604 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-f96f55954-vbv4j_75320d0a-a179-4fd1-8e6c-46dd6a8e88b1/manager/0.log" Dec 03 20:38:42 crc kubenswrapper[4916]: I1203 20:38:42.288030 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6647df69ff-4xrgb_b608b106-b706-4e13-9e78-6962d5346432/webhook-server/0.log" Dec 03 20:38:42 crc kubenswrapper[4916]: I1203 20:38:42.435475 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nrs5t_4fefc703-1f37-4d7f-a4cd-54415e811abe/kube-rbac-proxy/0.log" Dec 03 20:38:42 crc kubenswrapper[4916]: I1203 20:38:42.892960 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nrs5t_4fefc703-1f37-4d7f-a4cd-54415e811abe/speaker/0.log" Dec 03 20:38:43 crc kubenswrapper[4916]: I1203 20:38:43.057279 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/frr/0.log" Dec 03 20:38:57 crc kubenswrapper[4916]: I1203 20:38:57.577350 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/util/0.log" Dec 03 20:38:57 crc kubenswrapper[4916]: I1203 20:38:57.650873 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/pull/0.log" Dec 03 20:38:57 crc kubenswrapper[4916]: I1203 20:38:57.657832 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/pull/0.log" Dec 03 20:38:57 crc kubenswrapper[4916]: I1203 20:38:57.668321 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/util/0.log" Dec 03 20:38:57 crc kubenswrapper[4916]: I1203 20:38:57.809223 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/util/0.log" Dec 03 20:38:57 crc kubenswrapper[4916]: I1203 20:38:57.810190 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/pull/0.log" Dec 03 20:38:57 crc kubenswrapper[4916]: I1203 20:38:57.838673 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/extract/0.log" Dec 03 20:38:57 crc kubenswrapper[4916]: I1203 20:38:57.977501 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/util/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.131924 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/pull/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.132882 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/pull/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.159249 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/util/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.286305 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/pull/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.292717 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/util/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 
20:38:58.330141 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/extract/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.488384 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/util/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.609630 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/util/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.630994 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/pull/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.674644 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/pull/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.775298 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/util/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.807776 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/extract/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.822027 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/pull/0.log" Dec 03 20:38:58 crc kubenswrapper[4916]: I1203 20:38:58.942307 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-utilities/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.143504 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-content/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.189725 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-utilities/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.196601 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-content/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.320060 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-utilities/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.322117 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-content/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.563292 4916 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-utilities/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.789837 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-utilities/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.809854 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-content/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.815922 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-content/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.845214 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/registry-server/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.942933 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-utilities/0.log" Dec 03 20:38:59 crc kubenswrapper[4916]: I1203 20:38:59.992456 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-content/0.log" Dec 03 20:39:00 crc kubenswrapper[4916]: I1203 20:39:00.138379 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-rpvxq_23722fcc-7804-4705-9180-4dbd53a7e0e9/marketplace-operator/0.log" Dec 03 20:39:00 crc kubenswrapper[4916]: I1203 20:39:00.258518 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-utilities/0.log" Dec 03 20:39:00 crc kubenswrapper[4916]: I1203 20:39:00.511347 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/registry-server/0.log" Dec 03 20:39:00 crc kubenswrapper[4916]: I1203 20:39:00.644596 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-utilities/0.log" Dec 03 20:39:00 crc kubenswrapper[4916]: I1203 20:39:00.683514 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-content/0.log" Dec 03 20:39:00 crc kubenswrapper[4916]: I1203 20:39:00.684321 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-content/0.log" Dec 03 20:39:00 crc kubenswrapper[4916]: I1203 20:39:00.880307 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-content/0.log" Dec 03 20:39:00 crc kubenswrapper[4916]: I1203 20:39:00.883156 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-utilities/0.log" Dec 03 20:39:00 crc kubenswrapper[4916]: I1203 20:39:00.988003 4916 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/registry-server/0.log" Dec 03 20:39:01 crc kubenswrapper[4916]: I1203 20:39:01.811011 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-utilities/0.log" Dec 03 20:39:01 crc kubenswrapper[4916]: I1203 20:39:01.965971 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-content/0.log" Dec 03 20:39:01 crc kubenswrapper[4916]: I1203 20:39:01.967908 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-content/0.log" Dec 03 20:39:01 crc kubenswrapper[4916]: I1203 20:39:01.982754 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-utilities/0.log" Dec 03 20:39:02 crc kubenswrapper[4916]: I1203 20:39:02.192892 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-content/0.log" Dec 03 20:39:02 crc kubenswrapper[4916]: I1203 20:39:02.193226 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-utilities/0.log" Dec 03 20:39:02 crc kubenswrapper[4916]: I1203 20:39:02.666009 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/registry-server/0.log" Dec 03 20:39:17 crc kubenswrapper[4916]: I1203 20:39:17.514361 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-z8g4j_e7cb08b3-e6d4-4165-ba93-b35ed50108c7/prometheus-operator/0.log" Dec 03 20:39:17 crc kubenswrapper[4916]: I1203 20:39:17.518868 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt_b9a62187-a514-4067-8eae-ed64cd6daa76/prometheus-operator-admission-webhook/0.log" Dec 03 20:39:17 crc kubenswrapper[4916]: I1203 20:39:17.715279 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5_8b910b03-47fc-4dff-87ca-eed3318f67e5/prometheus-operator-admission-webhook/0.log" Dec 03 20:39:17 crc kubenswrapper[4916]: I1203 20:39:17.724656 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-gl8qq_02c05089-1da5-466d-ae93-bd7b99d6cba4/operator/0.log" Dec 03 20:39:17 crc kubenswrapper[4916]: I1203 20:39:17.850100 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-5cbf2_4122a230-b1b6-4725-b02a-a0829dfa4f3e/perses-operator/0.log" Dec 03 20:39:28 crc kubenswrapper[4916]: E1203 20:39:28.082344 4916 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.175:35752->38.102.83.175:36291: write tcp 38.102.83.175:35752->38.102.83.175:36291: write: connection reset by peer Dec 03 20:40:44 crc kubenswrapper[4916]: I1203 20:40:44.229671 4916 generic.go:334] "Generic (PLEG): container finished" podID="1d5eccb0-079e-4d88-afc5-738abcbc8c62" 
containerID="66925e79d95bb9cb4f65928fd21f248206b947fc86af1cfe49a071ab48e04b87" exitCode=0 Dec 03 20:40:44 crc kubenswrapper[4916]: I1203 20:40:44.229789 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6wjnk/must-gather-lh6tb" event={"ID":"1d5eccb0-079e-4d88-afc5-738abcbc8c62","Type":"ContainerDied","Data":"66925e79d95bb9cb4f65928fd21f248206b947fc86af1cfe49a071ab48e04b87"} Dec 03 20:40:44 crc kubenswrapper[4916]: I1203 20:40:44.230779 4916 scope.go:117] "RemoveContainer" containerID="66925e79d95bb9cb4f65928fd21f248206b947fc86af1cfe49a071ab48e04b87" Dec 03 20:40:44 crc kubenswrapper[4916]: I1203 20:40:44.442836 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6wjnk_must-gather-lh6tb_1d5eccb0-079e-4d88-afc5-738abcbc8c62/gather/0.log" Dec 03 20:40:46 crc kubenswrapper[4916]: I1203 20:40:46.158590 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:40:46 crc kubenswrapper[4916]: I1203 20:40:46.158942 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:40:53 crc kubenswrapper[4916]: I1203 20:40:53.083038 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6wjnk/must-gather-lh6tb"] Dec 03 20:40:53 crc kubenswrapper[4916]: I1203 20:40:53.083829 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-6wjnk/must-gather-lh6tb" podUID="1d5eccb0-079e-4d88-afc5-738abcbc8c62" containerName="copy" containerID="cri-o://23deeb95c674daa5803b20bfa729fd730bb142593848ccd87e1cbcc3a7ee59ef" gracePeriod=2 Dec 03 20:40:53 crc kubenswrapper[4916]: I1203 20:40:53.093191 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6wjnk/must-gather-lh6tb"] Dec 03 20:40:53 crc kubenswrapper[4916]: I1203 20:40:53.335501 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6wjnk_must-gather-lh6tb_1d5eccb0-079e-4d88-afc5-738abcbc8c62/copy/0.log" Dec 03 20:40:53 crc kubenswrapper[4916]: I1203 20:40:53.336062 4916 generic.go:334] "Generic (PLEG): container finished" podID="1d5eccb0-079e-4d88-afc5-738abcbc8c62" containerID="23deeb95c674daa5803b20bfa729fd730bb142593848ccd87e1cbcc3a7ee59ef" exitCode=143 Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.129989 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6wjnk_must-gather-lh6tb_1d5eccb0-079e-4d88-afc5-738abcbc8c62/copy/0.log" Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.130683 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6wjnk/must-gather-lh6tb" Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.293044 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1d5eccb0-079e-4d88-afc5-738abcbc8c62-must-gather-output\") pod \"1d5eccb0-079e-4d88-afc5-738abcbc8c62\" (UID: \"1d5eccb0-079e-4d88-afc5-738abcbc8c62\") " Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.293111 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhvwm\" (UniqueName: \"kubernetes.io/projected/1d5eccb0-079e-4d88-afc5-738abcbc8c62-kube-api-access-lhvwm\") pod \"1d5eccb0-079e-4d88-afc5-738abcbc8c62\" (UID: \"1d5eccb0-079e-4d88-afc5-738abcbc8c62\") " Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.298594 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d5eccb0-079e-4d88-afc5-738abcbc8c62-kube-api-access-lhvwm" (OuterVolumeSpecName: "kube-api-access-lhvwm") pod "1d5eccb0-079e-4d88-afc5-738abcbc8c62" (UID: "1d5eccb0-079e-4d88-afc5-738abcbc8c62"). InnerVolumeSpecName "kube-api-access-lhvwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.352133 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6wjnk_must-gather-lh6tb_1d5eccb0-079e-4d88-afc5-738abcbc8c62/copy/0.log" Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.352548 4916 scope.go:117] "RemoveContainer" containerID="23deeb95c674daa5803b20bfa729fd730bb142593848ccd87e1cbcc3a7ee59ef" Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.352708 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6wjnk/must-gather-lh6tb" Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.387013 4916 scope.go:117] "RemoveContainer" containerID="66925e79d95bb9cb4f65928fd21f248206b947fc86af1cfe49a071ab48e04b87" Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.397371 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhvwm\" (UniqueName: \"kubernetes.io/projected/1d5eccb0-079e-4d88-afc5-738abcbc8c62-kube-api-access-lhvwm\") on node \"crc\" DevicePath \"\"" Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.450185 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d5eccb0-079e-4d88-afc5-738abcbc8c62-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "1d5eccb0-079e-4d88-afc5-738abcbc8c62" (UID: "1d5eccb0-079e-4d88-afc5-738abcbc8c62"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.489635 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d5eccb0-079e-4d88-afc5-738abcbc8c62" path="/var/lib/kubelet/pods/1d5eccb0-079e-4d88-afc5-738abcbc8c62/volumes" Dec 03 20:40:54 crc kubenswrapper[4916]: I1203 20:40:54.499041 4916 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1d5eccb0-079e-4d88-afc5-738abcbc8c62-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 20:41:16 crc kubenswrapper[4916]: I1203 20:41:16.158705 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:41:16 crc kubenswrapper[4916]: I1203 20:41:16.160424 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:41:46 crc kubenswrapper[4916]: I1203 20:41:46.159545 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:41:46 crc kubenswrapper[4916]: I1203 20:41:46.160170 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:41:46 crc kubenswrapper[4916]: I1203 20:41:46.160231 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 20:41:46 crc kubenswrapper[4916]: I1203 20:41:46.161262 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 20:41:46 crc kubenswrapper[4916]: I1203 20:41:46.161345 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" gracePeriod=600 Dec 03 20:41:46 crc kubenswrapper[4916]: E1203 20:41:46.294796 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:41:47 crc kubenswrapper[4916]: I1203 20:41:47.040405 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" exitCode=0 Dec 03 20:41:47 crc kubenswrapper[4916]: I1203 20:41:47.040478 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247"} Dec 03 20:41:47 crc kubenswrapper[4916]: I1203 20:41:47.040592 4916 scope.go:117] "RemoveContainer" containerID="280086a6a26131e2928ec85e46e947327e0eccc4ecf2b5480e980dd302192d99" Dec 03 20:41:47 crc kubenswrapper[4916]: I1203 20:41:47.041573 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:41:47 crc kubenswrapper[4916]: E1203 20:41:47.042102 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:42:00 crc kubenswrapper[4916]: I1203 20:42:00.478963 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:42:00 crc kubenswrapper[4916]: E1203 20:42:00.481516 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:42:13 crc kubenswrapper[4916]: I1203 20:42:13.477955 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:42:13 crc kubenswrapper[4916]: E1203 20:42:13.478698 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:42:28 crc kubenswrapper[4916]: I1203 20:42:28.479406 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:42:28 crc kubenswrapper[4916]: E1203 20:42:28.480210 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" 
podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:42:40 crc kubenswrapper[4916]: I1203 20:42:40.477922 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:42:40 crc kubenswrapper[4916]: E1203 20:42:40.478692 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:42:51 crc kubenswrapper[4916]: I1203 20:42:51.479250 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:42:51 crc kubenswrapper[4916]: E1203 20:42:51.480463 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:43:05 crc kubenswrapper[4916]: I1203 20:43:05.479035 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:43:05 crc kubenswrapper[4916]: E1203 20:43:05.479762 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:43:18 crc kubenswrapper[4916]: I1203 20:43:18.479156 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:43:18 crc kubenswrapper[4916]: E1203 20:43:18.480243 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:43:30 crc kubenswrapper[4916]: I1203 20:43:30.479175 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:43:30 crc kubenswrapper[4916]: E1203 20:43:30.480222 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:43:41 crc kubenswrapper[4916]: I1203 20:43:41.480152 4916 scope.go:117] "RemoveContainer" 
containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:43:41 crc kubenswrapper[4916]: E1203 20:43:41.481197 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:43:56 crc kubenswrapper[4916]: I1203 20:43:56.479235 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:43:56 crc kubenswrapper[4916]: E1203 20:43:56.481752 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.458671 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-99lp5/must-gather-b6rfh"] Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459677 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d5eccb0-079e-4d88-afc5-738abcbc8c62" containerName="copy" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459693 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d5eccb0-079e-4d88-afc5-738abcbc8c62" containerName="copy" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459709 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459718 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459737 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" containerName="extract-utilities" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459745 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" containerName="extract-utilities" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459769 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459777 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459799 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerName="extract-content" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459807 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerName="extract-content" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459820 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" 
containerName="extract-content" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459827 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" containerName="extract-content" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459843 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d5eccb0-079e-4d88-afc5-738abcbc8c62" containerName="gather" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459851 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d5eccb0-079e-4d88-afc5-738abcbc8c62" containerName="gather" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459870 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" containerName="extract-content" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459879 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" containerName="extract-content" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459894 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" containerName="extract-utilities" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459904 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" containerName="extract-utilities" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459922 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" containerName="extract-utilities" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459930 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" containerName="extract-utilities" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459950 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459958 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459972 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" containerName="extract-content" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459979 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" containerName="extract-content" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.459991 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.459999 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: E1203 20:44:05.460017 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerName="extract-utilities" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.460027 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerName="extract-utilities" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.460264 4916 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3cfee2cd-6a80-47de-a938-86b7d5d40ccb" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.460286 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="5352f1bc-6a00-45f6-9917-1bbedf6caad8" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.460304 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d5eccb0-079e-4d88-afc5-738abcbc8c62" containerName="copy" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.460318 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="e49df94d-0f20-43eb-9ada-fcdf24a1ff88" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.460347 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d5eccb0-079e-4d88-afc5-738abcbc8c62" containerName="gather" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.460368 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="a55ea7b3-f0a8-44ef-86f6-e5c4fe44e0c8" containerName="registry-server" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.461727 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-99lp5/must-gather-b6rfh" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.466079 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-99lp5"/"openshift-service-ca.crt" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.468161 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-99lp5"/"kube-root-ca.crt" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.490169 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-99lp5/must-gather-b6rfh"] Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.630318 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvxhx\" (UniqueName: \"kubernetes.io/projected/28ecef4e-33a3-44b0-8223-102eb3ec2aed-kube-api-access-rvxhx\") pod \"must-gather-b6rfh\" (UID: \"28ecef4e-33a3-44b0-8223-102eb3ec2aed\") " pod="openshift-must-gather-99lp5/must-gather-b6rfh" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.630427 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/28ecef4e-33a3-44b0-8223-102eb3ec2aed-must-gather-output\") pod \"must-gather-b6rfh\" (UID: \"28ecef4e-33a3-44b0-8223-102eb3ec2aed\") " pod="openshift-must-gather-99lp5/must-gather-b6rfh" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.732724 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/28ecef4e-33a3-44b0-8223-102eb3ec2aed-must-gather-output\") pod \"must-gather-b6rfh\" (UID: \"28ecef4e-33a3-44b0-8223-102eb3ec2aed\") " pod="openshift-must-gather-99lp5/must-gather-b6rfh" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.732894 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvxhx\" (UniqueName: \"kubernetes.io/projected/28ecef4e-33a3-44b0-8223-102eb3ec2aed-kube-api-access-rvxhx\") pod \"must-gather-b6rfh\" (UID: \"28ecef4e-33a3-44b0-8223-102eb3ec2aed\") " pod="openshift-must-gather-99lp5/must-gather-b6rfh" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.733252 4916 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/28ecef4e-33a3-44b0-8223-102eb3ec2aed-must-gather-output\") pod \"must-gather-b6rfh\" (UID: \"28ecef4e-33a3-44b0-8223-102eb3ec2aed\") " pod="openshift-must-gather-99lp5/must-gather-b6rfh" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.758528 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvxhx\" (UniqueName: \"kubernetes.io/projected/28ecef4e-33a3-44b0-8223-102eb3ec2aed-kube-api-access-rvxhx\") pod \"must-gather-b6rfh\" (UID: \"28ecef4e-33a3-44b0-8223-102eb3ec2aed\") " pod="openshift-must-gather-99lp5/must-gather-b6rfh" Dec 03 20:44:05 crc kubenswrapper[4916]: I1203 20:44:05.784319 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-99lp5/must-gather-b6rfh" Dec 03 20:44:06 crc kubenswrapper[4916]: I1203 20:44:06.393470 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-99lp5/must-gather-b6rfh"] Dec 03 20:44:06 crc kubenswrapper[4916]: I1203 20:44:06.715821 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-99lp5/must-gather-b6rfh" event={"ID":"28ecef4e-33a3-44b0-8223-102eb3ec2aed","Type":"ContainerStarted","Data":"912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d"} Dec 03 20:44:06 crc kubenswrapper[4916]: I1203 20:44:06.717873 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-99lp5/must-gather-b6rfh" event={"ID":"28ecef4e-33a3-44b0-8223-102eb3ec2aed","Type":"ContainerStarted","Data":"8a4793912f93c723e66d045e328a5872ed1607bb6f0dd0a6e294e96ff3e9d365"} Dec 03 20:44:07 crc kubenswrapper[4916]: I1203 20:44:07.729366 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-99lp5/must-gather-b6rfh" event={"ID":"28ecef4e-33a3-44b0-8223-102eb3ec2aed","Type":"ContainerStarted","Data":"cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928"} Dec 03 20:44:07 crc kubenswrapper[4916]: I1203 20:44:07.752697 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-99lp5/must-gather-b6rfh" podStartSLOduration=2.752671683 podStartE2EDuration="2.752671683s" podCreationTimestamp="2025-12-03 20:44:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:44:07.748670357 +0000 UTC m=+4463.711480633" watchObservedRunningTime="2025-12-03 20:44:07.752671683 +0000 UTC m=+4463.715481959" Dec 03 20:44:08 crc kubenswrapper[4916]: I1203 20:44:08.478560 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:44:08 crc kubenswrapper[4916]: E1203 20:44:08.479015 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:44:10 crc kubenswrapper[4916]: I1203 20:44:10.966724 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-99lp5/crc-debug-q6qvg"] Dec 03 20:44:10 crc kubenswrapper[4916]: I1203 20:44:10.968356 4916 util.go:30] "No sandbox for pod can be found. 
Dec 03 20:44:10 crc kubenswrapper[4916]: I1203 20:44:10.969924 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-99lp5"/"default-dockercfg-zdg62"
Dec 03 20:44:11 crc kubenswrapper[4916]: I1203 20:44:11.094774 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d78514cd-9e4a-41fa-819f-522f9f998b0f-host\") pod \"crc-debug-q6qvg\" (UID: \"d78514cd-9e4a-41fa-819f-522f9f998b0f\") " pod="openshift-must-gather-99lp5/crc-debug-q6qvg"
Dec 03 20:44:11 crc kubenswrapper[4916]: I1203 20:44:11.094883 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsp87\" (UniqueName: \"kubernetes.io/projected/d78514cd-9e4a-41fa-819f-522f9f998b0f-kube-api-access-vsp87\") pod \"crc-debug-q6qvg\" (UID: \"d78514cd-9e4a-41fa-819f-522f9f998b0f\") " pod="openshift-must-gather-99lp5/crc-debug-q6qvg"
Dec 03 20:44:11 crc kubenswrapper[4916]: I1203 20:44:11.197003 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d78514cd-9e4a-41fa-819f-522f9f998b0f-host\") pod \"crc-debug-q6qvg\" (UID: \"d78514cd-9e4a-41fa-819f-522f9f998b0f\") " pod="openshift-must-gather-99lp5/crc-debug-q6qvg"
Dec 03 20:44:11 crc kubenswrapper[4916]: I1203 20:44:11.197165 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsp87\" (UniqueName: \"kubernetes.io/projected/d78514cd-9e4a-41fa-819f-522f9f998b0f-kube-api-access-vsp87\") pod \"crc-debug-q6qvg\" (UID: \"d78514cd-9e4a-41fa-819f-522f9f998b0f\") " pod="openshift-must-gather-99lp5/crc-debug-q6qvg"
Dec 03 20:44:11 crc kubenswrapper[4916]: I1203 20:44:11.197180 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d78514cd-9e4a-41fa-819f-522f9f998b0f-host\") pod \"crc-debug-q6qvg\" (UID: \"d78514cd-9e4a-41fa-819f-522f9f998b0f\") " pod="openshift-must-gather-99lp5/crc-debug-q6qvg"
Dec 03 20:44:11 crc kubenswrapper[4916]: I1203 20:44:11.303163 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsp87\" (UniqueName: \"kubernetes.io/projected/d78514cd-9e4a-41fa-819f-522f9f998b0f-kube-api-access-vsp87\") pod \"crc-debug-q6qvg\" (UID: \"d78514cd-9e4a-41fa-819f-522f9f998b0f\") " pod="openshift-must-gather-99lp5/crc-debug-q6qvg"
Dec 03 20:44:11 crc kubenswrapper[4916]: I1203 20:44:11.583573 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-99lp5/crc-debug-q6qvg"
Dec 03 20:44:11 crc kubenswrapper[4916]: W1203 20:44:11.628448 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd78514cd_9e4a_41fa_819f_522f9f998b0f.slice/crio-4eb65e72e6e7d171ea5539846ef0ae06be8611c8ade05fd5d071b5994f2cc973 WatchSource:0}: Error finding container 4eb65e72e6e7d171ea5539846ef0ae06be8611c8ade05fd5d071b5994f2cc973: Status 404 returned error can't find the container with id 4eb65e72e6e7d171ea5539846ef0ae06be8611c8ade05fd5d071b5994f2cc973
Dec 03 20:44:11 crc kubenswrapper[4916]: I1203 20:44:11.767806 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-99lp5/crc-debug-q6qvg" event={"ID":"d78514cd-9e4a-41fa-819f-522f9f998b0f","Type":"ContainerStarted","Data":"4eb65e72e6e7d171ea5539846ef0ae06be8611c8ade05fd5d071b5994f2cc973"}
Dec 03 20:44:12 crc kubenswrapper[4916]: I1203 20:44:12.776283 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-99lp5/crc-debug-q6qvg" event={"ID":"d78514cd-9e4a-41fa-819f-522f9f998b0f","Type":"ContainerStarted","Data":"542a7e9db7c938b9ae8afcdc976a26f4ee25d0197628f620c1ec0ace18935d4f"}
Dec 03 20:44:12 crc kubenswrapper[4916]: I1203 20:44:12.790511 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-99lp5/crc-debug-q6qvg" podStartSLOduration=2.790496241 podStartE2EDuration="2.790496241s" podCreationTimestamp="2025-12-03 20:44:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:44:12.786695461 +0000 UTC m=+4468.749505727" watchObservedRunningTime="2025-12-03 20:44:12.790496241 +0000 UTC m=+4468.753306507"
Dec 03 20:44:21 crc kubenswrapper[4916]: I1203 20:44:21.479121 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247"
Dec 03 20:44:21 crc kubenswrapper[4916]: E1203 20:44:21.480015 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:44:22 crc kubenswrapper[4916]: I1203 20:44:22.883776 4916 generic.go:334] "Generic (PLEG): container finished" podID="d78514cd-9e4a-41fa-819f-522f9f998b0f" containerID="542a7e9db7c938b9ae8afcdc976a26f4ee25d0197628f620c1ec0ace18935d4f" exitCode=0
Dec 03 20:44:22 crc kubenswrapper[4916]: I1203 20:44:22.883874 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-99lp5/crc-debug-q6qvg" event={"ID":"d78514cd-9e4a-41fa-819f-522f9f998b0f","Type":"ContainerDied","Data":"542a7e9db7c938b9ae8afcdc976a26f4ee25d0197628f620c1ec0ace18935d4f"}
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.052584 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-99lp5/crc-debug-q6qvg"
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.120995 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-99lp5/crc-debug-q6qvg"]
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.130110 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-99lp5/crc-debug-q6qvg"]
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.146451 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsp87\" (UniqueName: \"kubernetes.io/projected/d78514cd-9e4a-41fa-819f-522f9f998b0f-kube-api-access-vsp87\") pod \"d78514cd-9e4a-41fa-819f-522f9f998b0f\" (UID: \"d78514cd-9e4a-41fa-819f-522f9f998b0f\") "
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.146678 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d78514cd-9e4a-41fa-819f-522f9f998b0f-host\") pod \"d78514cd-9e4a-41fa-819f-522f9f998b0f\" (UID: \"d78514cd-9e4a-41fa-819f-522f9f998b0f\") "
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.147405 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d78514cd-9e4a-41fa-819f-522f9f998b0f-host" (OuterVolumeSpecName: "host") pod "d78514cd-9e4a-41fa-819f-522f9f998b0f" (UID: "d78514cd-9e4a-41fa-819f-522f9f998b0f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.168794 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d78514cd-9e4a-41fa-819f-522f9f998b0f-kube-api-access-vsp87" (OuterVolumeSpecName: "kube-api-access-vsp87") pod "d78514cd-9e4a-41fa-819f-522f9f998b0f" (UID: "d78514cd-9e4a-41fa-819f-522f9f998b0f"). InnerVolumeSpecName "kube-api-access-vsp87". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.248730 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsp87\" (UniqueName: \"kubernetes.io/projected/d78514cd-9e4a-41fa-819f-522f9f998b0f-kube-api-access-vsp87\") on node \"crc\" DevicePath \"\""
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.248953 4916 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d78514cd-9e4a-41fa-819f-522f9f998b0f-host\") on node \"crc\" DevicePath \"\""
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.489750 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d78514cd-9e4a-41fa-819f-522f9f998b0f" path="/var/lib/kubelet/pods/d78514cd-9e4a-41fa-819f-522f9f998b0f/volumes"
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.906096 4916 scope.go:117] "RemoveContainer" containerID="542a7e9db7c938b9ae8afcdc976a26f4ee25d0197628f620c1ec0ace18935d4f"
Dec 03 20:44:24 crc kubenswrapper[4916]: I1203 20:44:24.906248 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-99lp5/crc-debug-q6qvg"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.274435 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-99lp5/crc-debug-ttjr6"]
Dec 03 20:44:25 crc kubenswrapper[4916]: E1203 20:44:25.276265 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d78514cd-9e4a-41fa-819f-522f9f998b0f" containerName="container-00"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.276403 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="d78514cd-9e4a-41fa-819f-522f9f998b0f" containerName="container-00"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.276925 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="d78514cd-9e4a-41fa-819f-522f9f998b0f" containerName="container-00"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.278203 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-99lp5/crc-debug-ttjr6"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.280238 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-99lp5"/"default-dockercfg-zdg62"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.369055 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpsz7\" (UniqueName: \"kubernetes.io/projected/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-kube-api-access-kpsz7\") pod \"crc-debug-ttjr6\" (UID: \"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938\") " pod="openshift-must-gather-99lp5/crc-debug-ttjr6"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.369132 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-host\") pod \"crc-debug-ttjr6\" (UID: \"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938\") " pod="openshift-must-gather-99lp5/crc-debug-ttjr6"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.491999 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpsz7\" (UniqueName: \"kubernetes.io/projected/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-kube-api-access-kpsz7\") pod \"crc-debug-ttjr6\" (UID: \"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938\") " pod="openshift-must-gather-99lp5/crc-debug-ttjr6"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.492375 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-host\") pod \"crc-debug-ttjr6\" (UID: \"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938\") " pod="openshift-must-gather-99lp5/crc-debug-ttjr6"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.492443 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-host\") pod \"crc-debug-ttjr6\" (UID: \"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938\") " pod="openshift-must-gather-99lp5/crc-debug-ttjr6"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.513380 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpsz7\" (UniqueName: \"kubernetes.io/projected/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-kube-api-access-kpsz7\") pod \"crc-debug-ttjr6\" (UID: \"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938\") " pod="openshift-must-gather-99lp5/crc-debug-ttjr6"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.605634 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-99lp5/crc-debug-ttjr6"
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.919167 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-99lp5/crc-debug-ttjr6" event={"ID":"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938","Type":"ContainerStarted","Data":"169b1931be428b25db687566694d7364671d6c4918482df5fdb3a2e1ee3b3b0a"}
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.919480 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-99lp5/crc-debug-ttjr6" event={"ID":"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938","Type":"ContainerStarted","Data":"c3ee32eadf004302ca97c5e770c8c8d20b579f2abc6a3646b0d7462f69f82336"}
Dec 03 20:44:25 crc kubenswrapper[4916]: I1203 20:44:25.933420 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-99lp5/crc-debug-ttjr6" podStartSLOduration=0.933399114 podStartE2EDuration="933.399114ms" podCreationTimestamp="2025-12-03 20:44:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 20:44:25.931469803 +0000 UTC m=+4481.894280109" watchObservedRunningTime="2025-12-03 20:44:25.933399114 +0000 UTC m=+4481.896209380"
Dec 03 20:44:26 crc kubenswrapper[4916]: I1203 20:44:26.934480 4916 generic.go:334] "Generic (PLEG): container finished" podID="f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938" containerID="169b1931be428b25db687566694d7364671d6c4918482df5fdb3a2e1ee3b3b0a" exitCode=1
Dec 03 20:44:26 crc kubenswrapper[4916]: I1203 20:44:26.934589 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-99lp5/crc-debug-ttjr6" event={"ID":"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938","Type":"ContainerDied","Data":"169b1931be428b25db687566694d7364671d6c4918482df5fdb3a2e1ee3b3b0a"}
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.068434 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-99lp5/crc-debug-ttjr6"
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.110859 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-99lp5/crc-debug-ttjr6"]
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.139905 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-99lp5/crc-debug-ttjr6"]
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.143469 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpsz7\" (UniqueName: \"kubernetes.io/projected/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-kube-api-access-kpsz7\") pod \"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938\" (UID: \"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938\") "
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.143801 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-host\") pod \"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938\" (UID: \"f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938\") "
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.143874 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-host" (OuterVolumeSpecName: "host") pod "f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938" (UID: "f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.144505 4916 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-host\") on node \"crc\" DevicePath \"\""
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.164612 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-kube-api-access-kpsz7" (OuterVolumeSpecName: "kube-api-access-kpsz7") pod "f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938" (UID: "f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938"). InnerVolumeSpecName "kube-api-access-kpsz7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.246620 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpsz7\" (UniqueName: \"kubernetes.io/projected/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938-kube-api-access-kpsz7\") on node \"crc\" DevicePath \"\""
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.487651 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938" path="/var/lib/kubelet/pods/f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938/volumes"
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.959600 4916 scope.go:117] "RemoveContainer" containerID="169b1931be428b25db687566694d7364671d6c4918482df5fdb3a2e1ee3b3b0a"
Dec 03 20:44:28 crc kubenswrapper[4916]: I1203 20:44:28.959717 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-99lp5/crc-debug-ttjr6"
Dec 03 20:44:32 crc kubenswrapper[4916]: I1203 20:44:32.478095 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247"
Dec 03 20:44:32 crc kubenswrapper[4916]: E1203 20:44:32.478963 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:44:43 crc kubenswrapper[4916]: I1203 20:44:43.477761 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247"
Dec 03 20:44:43 crc kubenswrapper[4916]: E1203 20:44:43.478683 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:44:54 crc kubenswrapper[4916]: I1203 20:44:54.485981 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247"
Dec 03 20:44:54 crc kubenswrapper[4916]: E1203 20:44:54.486662 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.210111 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"]
Dec 03 20:45:00 crc kubenswrapper[4916]: E1203 20:45:00.210927 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938" containerName="container-00"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.210940 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938" containerName="container-00"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.211152 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="f18ec02b-b5bf-47d9-9cd4-4c88fe8f3938" containerName="container-00"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.211813 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.217686 4916 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.221580 4916 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.224812 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/daa964bf-3645-4545-9e90-de60c1de5dc4-secret-volume\") pod \"collect-profiles-29413245-knf4x\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.225065 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/daa964bf-3645-4545-9e90-de60c1de5dc4-config-volume\") pod \"collect-profiles-29413245-knf4x\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.225386 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjgpm\" (UniqueName: \"kubernetes.io/projected/daa964bf-3645-4545-9e90-de60c1de5dc4-kube-api-access-zjgpm\") pod \"collect-profiles-29413245-knf4x\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.239873 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"]
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.327689 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjgpm\" (UniqueName: \"kubernetes.io/projected/daa964bf-3645-4545-9e90-de60c1de5dc4-kube-api-access-zjgpm\") pod \"collect-profiles-29413245-knf4x\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.327778 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/daa964bf-3645-4545-9e90-de60c1de5dc4-secret-volume\") pod \"collect-profiles-29413245-knf4x\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.327851 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/daa964bf-3645-4545-9e90-de60c1de5dc4-config-volume\") pod \"collect-profiles-29413245-knf4x\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.328680 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/daa964bf-3645-4545-9e90-de60c1de5dc4-config-volume\") pod \"collect-profiles-29413245-knf4x\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.334533 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/daa964bf-3645-4545-9e90-de60c1de5dc4-secret-volume\") pod \"collect-profiles-29413245-knf4x\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.366713 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjgpm\" (UniqueName: \"kubernetes.io/projected/daa964bf-3645-4545-9e90-de60c1de5dc4-kube-api-access-zjgpm\") pod \"collect-profiles-29413245-knf4x\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:00 crc kubenswrapper[4916]: I1203 20:45:00.580907 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:01 crc kubenswrapper[4916]: I1203 20:45:01.071073 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"]
Dec 03 20:45:02 crc kubenswrapper[4916]: I1203 20:45:02.319372 4916 generic.go:334] "Generic (PLEG): container finished" podID="daa964bf-3645-4545-9e90-de60c1de5dc4" containerID="8b616f5cc4af70330d6dfb9f8a6c5f102fae5da2c16f7dc4808465775f115a57" exitCode=0
Dec 03 20:45:02 crc kubenswrapper[4916]: I1203 20:45:02.319469 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x" event={"ID":"daa964bf-3645-4545-9e90-de60c1de5dc4","Type":"ContainerDied","Data":"8b616f5cc4af70330d6dfb9f8a6c5f102fae5da2c16f7dc4808465775f115a57"}
Dec 03 20:45:02 crc kubenswrapper[4916]: I1203 20:45:02.319610 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x" event={"ID":"daa964bf-3645-4545-9e90-de60c1de5dc4","Type":"ContainerStarted","Data":"adc3d3663fdd9902938337d8d6e6160cf2420f30c57b5c23a412eb9755ec559e"}
Dec 03 20:45:03 crc kubenswrapper[4916]: I1203 20:45:03.700680 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:03 crc kubenswrapper[4916]: I1203 20:45:03.896461 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/daa964bf-3645-4545-9e90-de60c1de5dc4-config-volume\") pod \"daa964bf-3645-4545-9e90-de60c1de5dc4\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") "
Dec 03 20:45:03 crc kubenswrapper[4916]: I1203 20:45:03.896725 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/daa964bf-3645-4545-9e90-de60c1de5dc4-secret-volume\") pod \"daa964bf-3645-4545-9e90-de60c1de5dc4\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") "
Dec 03 20:45:03 crc kubenswrapper[4916]: I1203 20:45:03.896770 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjgpm\" (UniqueName: \"kubernetes.io/projected/daa964bf-3645-4545-9e90-de60c1de5dc4-kube-api-access-zjgpm\") pod \"daa964bf-3645-4545-9e90-de60c1de5dc4\" (UID: \"daa964bf-3645-4545-9e90-de60c1de5dc4\") "
Dec 03 20:45:03 crc kubenswrapper[4916]: I1203 20:45:03.897361 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daa964bf-3645-4545-9e90-de60c1de5dc4-config-volume" (OuterVolumeSpecName: "config-volume") pod "daa964bf-3645-4545-9e90-de60c1de5dc4" (UID: "daa964bf-3645-4545-9e90-de60c1de5dc4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 20:45:03 crc kubenswrapper[4916]: I1203 20:45:03.904832 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daa964bf-3645-4545-9e90-de60c1de5dc4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "daa964bf-3645-4545-9e90-de60c1de5dc4" (UID: "daa964bf-3645-4545-9e90-de60c1de5dc4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 20:45:03 crc kubenswrapper[4916]: I1203 20:45:03.904948 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daa964bf-3645-4545-9e90-de60c1de5dc4-kube-api-access-zjgpm" (OuterVolumeSpecName: "kube-api-access-zjgpm") pod "daa964bf-3645-4545-9e90-de60c1de5dc4" (UID: "daa964bf-3645-4545-9e90-de60c1de5dc4"). InnerVolumeSpecName "kube-api-access-zjgpm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:45:03 crc kubenswrapper[4916]: I1203 20:45:03.998975 4916 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/daa964bf-3645-4545-9e90-de60c1de5dc4-config-volume\") on node \"crc\" DevicePath \"\""
Dec 03 20:45:03 crc kubenswrapper[4916]: I1203 20:45:03.999013 4916 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/daa964bf-3645-4545-9e90-de60c1de5dc4-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 03 20:45:03 crc kubenswrapper[4916]: I1203 20:45:03.999025 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjgpm\" (UniqueName: \"kubernetes.io/projected/daa964bf-3645-4545-9e90-de60c1de5dc4-kube-api-access-zjgpm\") on node \"crc\" DevicePath \"\""
Dec 03 20:45:04 crc kubenswrapper[4916]: I1203 20:45:04.341403 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x" event={"ID":"daa964bf-3645-4545-9e90-de60c1de5dc4","Type":"ContainerDied","Data":"adc3d3663fdd9902938337d8d6e6160cf2420f30c57b5c23a412eb9755ec559e"}
Dec 03 20:45:04 crc kubenswrapper[4916]: I1203 20:45:04.341443 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adc3d3663fdd9902938337d8d6e6160cf2420f30c57b5c23a412eb9755ec559e"
Dec 03 20:45:04 crc kubenswrapper[4916]: I1203 20:45:04.341885 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413245-knf4x"
Dec 03 20:45:04 crc kubenswrapper[4916]: I1203 20:45:04.795309 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf"]
Dec 03 20:45:04 crc kubenswrapper[4916]: I1203 20:45:04.807397 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413200-xcmqf"]
Dec 03 20:45:06 crc kubenswrapper[4916]: I1203 20:45:06.490341 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4c152c6-ef21-44f7-b910-3299502de72a" path="/var/lib/kubelet/pods/d4c152c6-ef21-44f7-b910-3299502de72a/volumes"
Dec 03 20:45:08 crc kubenswrapper[4916]: I1203 20:45:08.478025 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247"
Dec 03 20:45:08 crc kubenswrapper[4916]: E1203 20:45:08.478979 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:45:20 crc kubenswrapper[4916]: I1203 20:45:20.477971 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247"
Dec 03 20:45:20 crc kubenswrapper[4916]: E1203 20:45:20.478837 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:45:35 crc kubenswrapper[4916]: I1203 20:45:35.479495 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247"
Dec 03 20:45:35 crc kubenswrapper[4916]: E1203 20:45:35.480693 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.035224 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_aa884253-05a5-47e4-a258-d95aab45bb36/init-config-reloader/0.log"
Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.189824 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_aa884253-05a5-47e4-a258-d95aab45bb36/alertmanager/0.log"
Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.195210 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_aa884253-05a5-47e4-a258-d95aab45bb36/init-config-reloader/0.log"
Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.216010 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_aa884253-05a5-47e4-a258-d95aab45bb36/config-reloader/0.log"
path="/var/log/pods/openstack_alertmanager-metric-storage-0_aa884253-05a5-47e4-a258-d95aab45bb36/config-reloader/0.log" Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.347281 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_2002a42d-6cf6-42dd-99b1-df69ff30bc53/aodh-api/0.log" Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.385632 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_2002a42d-6cf6-42dd-99b1-df69ff30bc53/aodh-evaluator/0.log" Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.429605 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_2002a42d-6cf6-42dd-99b1-df69ff30bc53/aodh-listener/0.log" Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.513987 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_2002a42d-6cf6-42dd-99b1-df69ff30bc53/aodh-notifier/0.log" Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.572030 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-f787c8578-2cjjd_eda087f8-dbb4-47ca-a210-576abc73a55e/barbican-api/0.log" Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.610261 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-f787c8578-2cjjd_eda087f8-dbb4-47ca-a210-576abc73a55e/barbican-api-log/0.log" Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.763108 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7755d7d784-5sk27_c3d699e4-f5e8-4719-bc16-b5a85bcaa695/barbican-keystone-listener/0.log" Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.783763 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7755d7d784-5sk27_c3d699e4-f5e8-4719-bc16-b5a85bcaa695/barbican-keystone-listener-log/0.log" Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.928526 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c644b9d95-mhwlb_b33d04f2-ecc8-4c07-b258-60918f9aff05/barbican-worker/0.log" Dec 03 20:45:38 crc kubenswrapper[4916]: I1203 20:45:38.959339 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c644b9d95-mhwlb_b33d04f2-ecc8-4c07-b258-60918f9aff05/barbican-worker-log/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.091080 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-r5vtp_93e63900-68b9-4c76-b614-78dcd0862645/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.148905 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6f01d12e-f1c3-4da2-b3bc-31623e4a2493/ceilometer-central-agent/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.236398 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6f01d12e-f1c3-4da2-b3bc-31623e4a2493/ceilometer-notification-agent/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.333723 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6f01d12e-f1c3-4da2-b3bc-31623e4a2493/sg-core/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.349016 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_6f01d12e-f1c3-4da2-b3bc-31623e4a2493/proxy-httpd/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.468865 4916 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_b816c89d-8a9e-48c7-841a-dcb5ee7ab0df/cinder-api/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.499629 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_b816c89d-8a9e-48c7-841a-dcb5ee7ab0df/cinder-api-log/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.633810 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2727cbbe-cad4-47ff-b451-2f66b4f65bbf/cinder-scheduler/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.681656 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2727cbbe-cad4-47ff-b451-2f66b4f65bbf/probe/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.768909 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-v727v_8d641422-c093-42d7-bc60-6df1dd5b0796/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.866962 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-mpzkz_61740a0d-2157-431d-a999-802aad6cb402/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:39 crc kubenswrapper[4916]: I1203 20:45:39.996382 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6f6df4f56c-xfpvl_5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62/init/0.log" Dec 03 20:45:40 crc kubenswrapper[4916]: I1203 20:45:40.162628 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6f6df4f56c-xfpvl_5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62/init/0.log" Dec 03 20:45:40 crc kubenswrapper[4916]: I1203 20:45:40.219624 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6f6df4f56c-xfpvl_5057e4b5-76a4-40e2-b6cb-a5e6e1afcc62/dnsmasq-dns/0.log" Dec 03 20:45:40 crc kubenswrapper[4916]: I1203 20:45:40.233698 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-tdq5l_f236d742-b29b-42c2-90ac-70d01657b967/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:40 crc kubenswrapper[4916]: I1203 20:45:40.440456 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_09b23ba0-7111-4c00-9ecc-a4ea541b3ca4/glance-httpd/0.log" Dec 03 20:45:40 crc kubenswrapper[4916]: I1203 20:45:40.456744 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_09b23ba0-7111-4c00-9ecc-a4ea541b3ca4/glance-log/0.log" Dec 03 20:45:40 crc kubenswrapper[4916]: I1203 20:45:40.595100 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_65acea52-6e4e-44c7-9406-bc296db6821b/glance-httpd/0.log" Dec 03 20:45:40 crc kubenswrapper[4916]: I1203 20:45:40.618130 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_65acea52-6e4e-44c7-9406-bc296db6821b/glance-log/0.log" Dec 03 20:45:41 crc kubenswrapper[4916]: I1203 20:45:41.027372 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-56c49bcc9c-497gn_af5156d3-f2f0-4963-8561-5eac0b719c9a/heat-api/0.log" Dec 03 20:45:41 crc kubenswrapper[4916]: I1203 20:45:41.113090 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_heat-engine-7b767dc896-5v8nl_e7ee41e1-65cb-4642-ae18-5f0a926d8c1d/heat-engine/0.log" Dec 03 20:45:41 crc kubenswrapper[4916]: I1203 20:45:41.200873 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-8m7fp_c94bfa68-5e27-47fe-a55e-b05abead70ac/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:41 crc kubenswrapper[4916]: I1203 20:45:41.255428 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-84fcbd5864-k72dj_480f4ab0-3854-480f-9dd8-d44be1454e48/heat-cfnapi/0.log" Dec 03 20:45:41 crc kubenswrapper[4916]: I1203 20:45:41.379662 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-88vj9_6b66d006-a019-4921-9663-8fc348caf782/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:41 crc kubenswrapper[4916]: I1203 20:45:41.474070 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7c48bb485f-tqvlz_288ed5cf-795f-44fd-8ae8-ba522e48a62e/keystone-api/0.log" Dec 03 20:45:41 crc kubenswrapper[4916]: I1203 20:45:41.486054 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29413201-56f24_f303570c-cd6a-4249-9f85-dda22c04e2a7/keystone-cron/0.log" Dec 03 20:45:41 crc kubenswrapper[4916]: I1203 20:45:41.587819 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_9d38924d-74b6-46db-9588-fa5c485fba69/kube-state-metrics/0.log" Dec 03 20:45:41 crc kubenswrapper[4916]: I1203 20:45:41.750774 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-6t8b8_6c68c375-3a19-46dc-8d30-dd8f6edf361e/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:41 crc kubenswrapper[4916]: I1203 20:45:41.922913 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6b599f5db5-cs2bs_c924271a-a9cb-45cd-b1ab-3631a27c81aa/neutron-api/0.log" Dec 03 20:45:41 crc kubenswrapper[4916]: I1203 20:45:41.942404 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6b599f5db5-cs2bs_c924271a-a9cb-45cd-b1ab-3631a27c81aa/neutron-httpd/0.log" Dec 03 20:45:42 crc kubenswrapper[4916]: I1203 20:45:42.189333 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-xc6j4_257d92ae-6326-4650-830c-b29ed36146e7/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:42 crc kubenswrapper[4916]: I1203 20:45:42.384366 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d9f3c72c-b924-4d5e-8c68-f62d5e83a870/nova-api-log/0.log" Dec 03 20:45:42 crc kubenswrapper[4916]: I1203 20:45:42.582337 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_a5a290e9-1938-4d33-a6b5-f7490d7a6bcc/nova-cell0-conductor-conductor/0.log" Dec 03 20:45:42 crc kubenswrapper[4916]: I1203 20:45:42.773368 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_d9f3c72c-b924-4d5e-8c68-f62d5e83a870/nova-api-api/0.log" Dec 03 20:45:42 crc kubenswrapper[4916]: I1203 20:45:42.790520 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_22c68a9c-f222-4118-b636-311954e0d502/nova-cell1-conductor-conductor/0.log" Dec 03 20:45:42 crc kubenswrapper[4916]: I1203 20:45:42.888091 4916 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_afa6fb7e-4053-4afb-89d5-2bce4d35c456/nova-cell1-novncproxy-novncproxy/0.log" Dec 03 20:45:43 crc kubenswrapper[4916]: I1203 20:45:43.007978 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-zk7x7_8eaccb2f-783d-4da3-90ae-c88fdfef6c86/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:43 crc kubenswrapper[4916]: I1203 20:45:43.242456 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d3baf082-dd08-4c10-aac9-8ce2874aa2ae/nova-metadata-log/0.log" Dec 03 20:45:43 crc kubenswrapper[4916]: I1203 20:45:43.478135 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d8b94f14-6cc4-4c21-969c-e1aeb3c199fe/mysql-bootstrap/0.log" Dec 03 20:45:43 crc kubenswrapper[4916]: I1203 20:45:43.496008 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_615a55a3-e9f9-4261-96a6-bcf865f0c183/nova-scheduler-scheduler/0.log" Dec 03 20:45:43 crc kubenswrapper[4916]: I1203 20:45:43.663445 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d8b94f14-6cc4-4c21-969c-e1aeb3c199fe/mysql-bootstrap/0.log" Dec 03 20:45:43 crc kubenswrapper[4916]: I1203 20:45:43.734167 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d8b94f14-6cc4-4c21-969c-e1aeb3c199fe/galera/0.log" Dec 03 20:45:43 crc kubenswrapper[4916]: I1203 20:45:43.859530 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_85db28fe-52b4-4feb-8461-8c7a7e6e5179/mysql-bootstrap/0.log" Dec 03 20:45:44 crc kubenswrapper[4916]: I1203 20:45:44.013590 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_85db28fe-52b4-4feb-8461-8c7a7e6e5179/mysql-bootstrap/0.log" Dec 03 20:45:44 crc kubenswrapper[4916]: I1203 20:45:44.051223 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_85db28fe-52b4-4feb-8461-8c7a7e6e5179/galera/0.log" Dec 03 20:45:44 crc kubenswrapper[4916]: I1203 20:45:44.576452 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_0e722523-b45d-4256-a08c-088a095f77f5/openstackclient/0.log" Dec 03 20:45:44 crc kubenswrapper[4916]: I1203 20:45:44.588858 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ndv46_c79e8746-0571-48ab-ad7d-94b92eadc07e/openstack-network-exporter/0.log" Dec 03 20:45:44 crc kubenswrapper[4916]: I1203 20:45:44.741667 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_d3baf082-dd08-4c10-aac9-8ce2874aa2ae/nova-metadata-metadata/0.log" Dec 03 20:45:44 crc kubenswrapper[4916]: I1203 20:45:44.842061 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7wkt5_4180ae6f-d0a4-4af0-b89c-48ab118b3f8c/ovsdb-server-init/0.log" Dec 03 20:45:44 crc kubenswrapper[4916]: I1203 20:45:44.994399 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7wkt5_4180ae6f-d0a4-4af0-b89c-48ab118b3f8c/ovsdb-server/0.log" Dec 03 20:45:45 crc kubenswrapper[4916]: I1203 20:45:45.004498 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7wkt5_4180ae6f-d0a4-4af0-b89c-48ab118b3f8c/ovsdb-server-init/0.log" Dec 03 20:45:45 crc kubenswrapper[4916]: I1203 20:45:45.024750 4916 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7wkt5_4180ae6f-d0a4-4af0-b89c-48ab118b3f8c/ovs-vswitchd/0.log" Dec 03 20:45:45 crc kubenswrapper[4916]: I1203 20:45:45.176025 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-stq8b_7cb5f017-c41b-4af3-8455-e1ab42faa626/ovn-controller/0.log" Dec 03 20:45:45 crc kubenswrapper[4916]: I1203 20:45:45.237939 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qv9hr_49da2e3d-9d45-478a-b073-beb7a5ca51ae/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:45 crc kubenswrapper[4916]: I1203 20:45:45.543515 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_ec7f91f3-02c9-42a9-b415-aa58806d9b17/openstack-network-exporter/0.log" Dec 03 20:45:45 crc kubenswrapper[4916]: I1203 20:45:45.569443 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_ec7f91f3-02c9-42a9-b415-aa58806d9b17/ovn-northd/0.log" Dec 03 20:45:46 crc kubenswrapper[4916]: I1203 20:45:46.385206 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_87458b34-0f3f-430d-8c93-a3138854fc20/ovsdbserver-nb/0.log" Dec 03 20:45:46 crc kubenswrapper[4916]: I1203 20:45:46.394204 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_87458b34-0f3f-430d-8c93-a3138854fc20/openstack-network-exporter/0.log" Dec 03 20:45:46 crc kubenswrapper[4916]: I1203 20:45:46.411290 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_8e2bf00a-bdbe-4c59-b020-b1c3d96375f3/openstack-network-exporter/0.log" Dec 03 20:45:46 crc kubenswrapper[4916]: I1203 20:45:46.477831 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:45:46 crc kubenswrapper[4916]: E1203 20:45:46.478152 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:45:46 crc kubenswrapper[4916]: I1203 20:45:46.580219 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_8e2bf00a-bdbe-4c59-b020-b1c3d96375f3/ovsdbserver-sb/0.log" Dec 03 20:45:46 crc kubenswrapper[4916]: I1203 20:45:46.714033 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5747d5b464-dtdts_b6858994-e73c-4542-9cb1-5bb0213f35bf/placement-api/0.log" Dec 03 20:45:46 crc kubenswrapper[4916]: I1203 20:45:46.746292 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5747d5b464-dtdts_b6858994-e73c-4542-9cb1-5bb0213f35bf/placement-log/0.log" Dec 03 20:45:46 crc kubenswrapper[4916]: I1203 20:45:46.834494 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f14e71ef-0642-4eff-b6c8-08454ad04ccf/init-config-reloader/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: I1203 20:45:47.053472 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f14e71ef-0642-4eff-b6c8-08454ad04ccf/init-config-reloader/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: 
I1203 20:45:47.134409 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f14e71ef-0642-4eff-b6c8-08454ad04ccf/thanos-sidecar/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: I1203 20:45:47.141697 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f14e71ef-0642-4eff-b6c8-08454ad04ccf/config-reloader/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: I1203 20:45:47.148801 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_f14e71ef-0642-4eff-b6c8-08454ad04ccf/prometheus/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: I1203 20:45:47.320934 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c3d2c7b8-c85e-4806-986b-55b486864e84/setup-container/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: I1203 20:45:47.502818 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c3d2c7b8-c85e-4806-986b-55b486864e84/setup-container/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: I1203 20:45:47.543878 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_c3d2c7b8-c85e-4806-986b-55b486864e84/rabbitmq/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: I1203 20:45:47.566614 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a80b26ac-d55e-4513-9a8d-a70a0b197433/setup-container/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: I1203 20:45:47.698849 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a80b26ac-d55e-4513-9a8d-a70a0b197433/setup-container/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: I1203 20:45:47.762007 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-m572f_c7109f13-10df-437e-96da-34c0889a9231/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: I1203 20:45:47.783636 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_a80b26ac-d55e-4513-9a8d-a70a0b197433/rabbitmq/0.log" Dec 03 20:45:47 crc kubenswrapper[4916]: I1203 20:45:47.972850 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-bbf7x_af00bdd2-2610-40a8-b6d7-1252796d9341/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:48 crc kubenswrapper[4916]: I1203 20:45:48.050926 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-gpdfd_d2eaefa8-6147-45c5-ae3e-77e0d47c2d11/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:48 crc kubenswrapper[4916]: I1203 20:45:48.229200 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-4pcnc_ace07bb7-8494-4a26-9737-33b0407dde91/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:48 crc kubenswrapper[4916]: I1203 20:45:48.251093 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-qcp8f_d3c34a0f-3914-4307-9e37-317749a61c02/ssh-known-hosts-edpm-deployment/0.log" Dec 03 20:45:48 crc kubenswrapper[4916]: I1203 20:45:48.485626 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5fb66fd5df-mqd8w_7c9b70bb-7121-4484-9d1c-f928d26b6f3a/proxy-server/0.log" Dec 03 20:45:48 crc kubenswrapper[4916]: I1203 
20:45:48.565741 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5fb66fd5df-mqd8w_7c9b70bb-7121-4484-9d1c-f928d26b6f3a/proxy-httpd/0.log" Dec 03 20:45:48 crc kubenswrapper[4916]: I1203 20:45:48.686450 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-wfwfd_955b2a04-73e1-4ab5-b322-e301684e8785/swift-ring-rebalance/0.log" Dec 03 20:45:48 crc kubenswrapper[4916]: I1203 20:45:48.797964 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/account-reaper/0.log" Dec 03 20:45:48 crc kubenswrapper[4916]: I1203 20:45:48.864810 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/account-auditor/0.log" Dec 03 20:45:48 crc kubenswrapper[4916]: I1203 20:45:48.986361 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/account-replicator/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.032430 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/account-server/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.034099 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/container-auditor/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.158630 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/container-replicator/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.214076 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/container-server/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.276988 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/container-updater/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.289907 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/object-auditor/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.392438 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/object-expirer/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.473812 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/object-server/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.480866 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/object-updater/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.489648 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/object-replicator/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.608844 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/rsync/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.658087 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_bb20ada8-afbe-42ef-a8b8-a22cd16d4dc5/swift-recon-cron/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.758948 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-pbn2p_531e7486-e849-4176-b8d7-b93e11082c0a/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:49 crc kubenswrapper[4916]: I1203 20:45:49.857908 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-hnz8h_d64868ee-2aa9-48b3-bfd7-895a9daf8c5a/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 20:45:55 crc kubenswrapper[4916]: I1203 20:45:55.742521 4916 scope.go:117] "RemoveContainer" containerID="9f608ec231c183b8e79ab4b193eb4d382c013849808fe5932e2a0f82953031f7" Dec 03 20:45:58 crc kubenswrapper[4916]: I1203 20:45:58.477549 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:45:58 crc kubenswrapper[4916]: E1203 20:45:58.478427 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:45:59 crc kubenswrapper[4916]: I1203 20:45:59.160222 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_431d6c6e-0ec4-4eae-8bee-4fdce5e2328d/memcached/0.log" Dec 03 20:46:13 crc kubenswrapper[4916]: I1203 20:46:13.478447 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:46:13 crc kubenswrapper[4916]: E1203 20:46:13.479522 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:46:19 crc kubenswrapper[4916]: I1203 20:46:19.761887 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/util/0.log" Dec 03 20:46:19 crc kubenswrapper[4916]: I1203 20:46:19.921991 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/util/0.log" Dec 03 20:46:19 crc kubenswrapper[4916]: I1203 20:46:19.937402 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/pull/0.log" Dec 03 20:46:19 crc kubenswrapper[4916]: I1203 20:46:19.998635 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/pull/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.122651 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/util/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.123334 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/extract/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.147494 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0c2e265fe131cb14936d72cd088fca614a520bc8ac8ffc13227d716c109tvhd_6d033d79-255d-44d4-8082-b1044f95ab2e/pull/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.334629 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-xn476_d2c7e9c7-96cd-47bd-978a-c3fd41c74089/kube-rbac-proxy/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.384772 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-6pzj5_232cd6fc-5f1d-4398-ae7d-5c34f49843f3/kube-rbac-proxy/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.444380 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-xn476_d2c7e9c7-96cd-47bd-978a-c3fd41c74089/manager/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.585865 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-nsfm2_cf093783-d31b-42fc-a85f-fff6c35fdae8/kube-rbac-proxy/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.617272 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-6pzj5_232cd6fc-5f1d-4398-ae7d-5c34f49843f3/manager/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.732944 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-nsfm2_cf093783-d31b-42fc-a85f-fff6c35fdae8/manager/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.802012 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-x6tb6_a83c7d54-9430-456f-b83e-abed5d9030b8/kube-rbac-proxy/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.886343 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-x6tb6_a83c7d54-9430-456f-b83e-abed5d9030b8/manager/0.log" Dec 03 20:46:20 crc kubenswrapper[4916]: I1203 20:46:20.990006 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-csdvv_b63d5bf7-8901-4bce-90d9-0006ae946230/kube-rbac-proxy/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.106016 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-csdvv_b63d5bf7-8901-4bce-90d9-0006ae946230/manager/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.128016 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-kndn4_31355b8f-c9a1-4ddf-a97f-de6d4f506a67/kube-rbac-proxy/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.213147 
4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-kndn4_31355b8f-c9a1-4ddf-a97f-de6d4f506a67/manager/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.315113 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-dth7w_a571d18b-686d-472d-9086-e192ec504db4/kube-rbac-proxy/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.498792 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-dth7w_a571d18b-686d-472d-9086-e192ec504db4/manager/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.529388 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-jltdg_907117b8-0a09-440c-bb47-bfa09ccec80b/kube-rbac-proxy/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.544541 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-jltdg_907117b8-0a09-440c-bb47-bfa09ccec80b/manager/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.692930 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-6x48f_877f6f89-deed-4f06-adb1-cfa6b5254db2/kube-rbac-proxy/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.785226 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-6x48f_877f6f89-deed-4f06-adb1-cfa6b5254db2/manager/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.879653 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-sd86t_7303e0f2-e41d-4220-a72b-88e6b44b016c/kube-rbac-proxy/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.904873 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-sd86t_7303e0f2-e41d-4220-a72b-88e6b44b016c/manager/0.log" Dec 03 20:46:21 crc kubenswrapper[4916]: I1203 20:46:21.986832 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-7q6dq_dc81071a-3da1-4e63-b733-13e39ecfb823/kube-rbac-proxy/0.log" Dec 03 20:46:22 crc kubenswrapper[4916]: I1203 20:46:22.098519 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-7q6dq_dc81071a-3da1-4e63-b733-13e39ecfb823/manager/0.log" Dec 03 20:46:22 crc kubenswrapper[4916]: I1203 20:46:22.165634 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-mllvg_251c39c1-e63c-4772-a0e6-88528867a64d/kube-rbac-proxy/0.log" Dec 03 20:46:22 crc kubenswrapper[4916]: I1203 20:46:22.263801 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-mllvg_251c39c1-e63c-4772-a0e6-88528867a64d/manager/0.log" Dec 03 20:46:22 crc kubenswrapper[4916]: I1203 20:46:22.393079 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-29mr4_462898ce-79ab-4cd6-b05e-e19b65c80fa1/kube-rbac-proxy/0.log" Dec 03 20:46:22 crc kubenswrapper[4916]: 
I1203 20:46:22.458452 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-29mr4_462898ce-79ab-4cd6-b05e-e19b65c80fa1/manager/0.log" Dec 03 20:46:22 crc kubenswrapper[4916]: I1203 20:46:22.557754 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-wp7lm_fc0e679d-4033-4479-ba7a-cdc160e0b6ad/manager/0.log" Dec 03 20:46:22 crc kubenswrapper[4916]: I1203 20:46:22.596004 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-wp7lm_fc0e679d-4033-4479-ba7a-cdc160e0b6ad/kube-rbac-proxy/0.log" Dec 03 20:46:22 crc kubenswrapper[4916]: I1203 20:46:22.718025 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs_f356aae7-fed8-4f1b-a863-d7b47bcda904/manager/0.log" Dec 03 20:46:22 crc kubenswrapper[4916]: I1203 20:46:22.754184 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4qg8fs_f356aae7-fed8-4f1b-a863-d7b47bcda904/kube-rbac-proxy/0.log" Dec 03 20:46:23 crc kubenswrapper[4916]: I1203 20:46:23.235466 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-n7msw_005f663b-9f61-44c6-8958-bbb7311df12e/registry-server/0.log" Dec 03 20:46:23 crc kubenswrapper[4916]: I1203 20:46:23.365790 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-8547fd68fd-vc7gd_e623d74a-1d16-43ec-a04f-a1817a8a8294/operator/0.log" Dec 03 20:46:23 crc kubenswrapper[4916]: I1203 20:46:23.420881 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-cjms4_c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae/kube-rbac-proxy/0.log" Dec 03 20:46:23 crc kubenswrapper[4916]: I1203 20:46:23.501059 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-cjms4_c8130b26-ee3d-4fc2-87c6-830aa4dcf0ae/manager/0.log" Dec 03 20:46:24 crc kubenswrapper[4916]: I1203 20:46:24.103605 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-xvp6h_8829329b-8de5-4a0d-bd48-9cb7338c2dd1/kube-rbac-proxy/0.log" Dec 03 20:46:24 crc kubenswrapper[4916]: I1203 20:46:24.156464 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-xvp6h_8829329b-8de5-4a0d-bd48-9cb7338c2dd1/manager/0.log" Dec 03 20:46:24 crc kubenswrapper[4916]: I1203 20:46:24.305402 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-m5s6k_1a7e3254-35ac-48fa-8ab7-11e85c780369/operator/0.log" Dec 03 20:46:24 crc kubenswrapper[4916]: I1203 20:46:24.313035 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-d8ff785c7-jbr4p_e7ce3ac9-9ba0-4991-a95b-c33fca5a2ed2/manager/0.log" Dec 03 20:46:24 crc kubenswrapper[4916]: I1203 20:46:24.420793 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-7bs98_04ce9fc2-2134-4d10-b3d8-764bca295eed/manager/0.log" Dec 03 20:46:24 crc 
kubenswrapper[4916]: I1203 20:46:24.437927 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-7bs98_04ce9fc2-2134-4d10-b3d8-764bca295eed/kube-rbac-proxy/0.log" Dec 03 20:46:24 crc kubenswrapper[4916]: I1203 20:46:24.528927 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_6f4635b6-2410-4d5f-a7c9-3cf0a04739f7/kube-rbac-proxy/0.log" Dec 03 20:46:24 crc kubenswrapper[4916]: I1203 20:46:24.666264 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-2txxc_a5bc0003-390d-477e-8b21-f7fda61cb051/kube-rbac-proxy/0.log" Dec 03 20:46:24 crc kubenswrapper[4916]: I1203 20:46:24.760347 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-65b6f7cdd5-fbb7s_6f4635b6-2410-4d5f-a7c9-3cf0a04739f7/manager/0.log" Dec 03 20:46:24 crc kubenswrapper[4916]: I1203 20:46:24.780836 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-2txxc_a5bc0003-390d-477e-8b21-f7fda61cb051/manager/0.log" Dec 03 20:46:24 crc kubenswrapper[4916]: I1203 20:46:24.868612 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-7vz8k_463726a8-9ad4-486b-b5b8-166fed3a6190/kube-rbac-proxy/0.log" Dec 03 20:46:24 crc kubenswrapper[4916]: I1203 20:46:24.905214 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-7vz8k_463726a8-9ad4-486b-b5b8-166fed3a6190/manager/0.log" Dec 03 20:46:26 crc kubenswrapper[4916]: I1203 20:46:26.478553 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:46:26 crc kubenswrapper[4916]: E1203 20:46:26.480184 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:46:37 crc kubenswrapper[4916]: I1203 20:46:37.480429 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:46:37 crc kubenswrapper[4916]: E1203 20:46:37.481292 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" Dec 03 20:46:46 crc kubenswrapper[4916]: I1203 20:46:46.794960 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-7rwpc_75435454-50d3-4560-821e-a5b8c171652b/control-plane-machine-set-operator/0.log" Dec 03 20:46:47 crc kubenswrapper[4916]: I1203 20:46:47.589032 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6ngfp_dbed5156-bd14-449e-943a-488606ac49e2/machine-api-operator/0.log" Dec 03 20:46:47 crc kubenswrapper[4916]: I1203 20:46:47.604426 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6ngfp_dbed5156-bd14-449e-943a-488606ac49e2/kube-rbac-proxy/0.log" Dec 03 20:46:50 crc kubenswrapper[4916]: I1203 20:46:50.478149 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:46:51 crc kubenswrapper[4916]: I1203 20:46:51.359000 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"41adc5ebb421bc69f1dce3db28abc216e5f622afd62161d7d8963c109aa5e64f"} Dec 03 20:47:01 crc kubenswrapper[4916]: I1203 20:47:01.061416 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-x7n5p_2f8dae92-e705-48ac-ae95-fb463698c6a7/cert-manager-controller/0.log" Dec 03 20:47:01 crc kubenswrapper[4916]: I1203 20:47:01.268594 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-pz52m_6e2a44e6-d6b3-4b89-a243-7a9f66b36e18/cert-manager-cainjector/0.log" Dec 03 20:47:01 crc kubenswrapper[4916]: I1203 20:47:01.348089 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-gvbbc_8ac2850b-bf3b-4c3c-a1d1-8e59ce302246/cert-manager-webhook/0.log" Dec 03 20:47:15 crc kubenswrapper[4916]: I1203 20:47:15.452524 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-gbx7d_9ce1f525-9698-4837-8ad5-990c187fd275/nmstate-console-plugin/0.log" Dec 03 20:47:15 crc kubenswrapper[4916]: I1203 20:47:15.673074 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-gnlxq_7603911b-7915-49df-afc6-e80da6dd90f2/nmstate-handler/0.log" Dec 03 20:47:15 crc kubenswrapper[4916]: I1203 20:47:15.740624 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-nq5wn_a64200e6-4761-4d12-b787-7e0260253ffd/kube-rbac-proxy/0.log" Dec 03 20:47:15 crc kubenswrapper[4916]: I1203 20:47:15.774425 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-nq5wn_a64200e6-4761-4d12-b787-7e0260253ffd/nmstate-metrics/0.log" Dec 03 20:47:15 crc kubenswrapper[4916]: I1203 20:47:15.893074 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-6jmkr_697f3c53-6482-4054-8f02-fe024ba5f514/nmstate-operator/0.log" Dec 03 20:47:15 crc kubenswrapper[4916]: I1203 20:47:15.958202 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-tqmwt_1710047a-61c2-485d-ad6c-05691f102e43/nmstate-webhook/0.log" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.201396 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m7n6q"] Dec 03 20:47:18 crc kubenswrapper[4916]: E1203 20:47:18.202147 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daa964bf-3645-4545-9e90-de60c1de5dc4" containerName="collect-profiles" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.202162 4916 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="daa964bf-3645-4545-9e90-de60c1de5dc4" containerName="collect-profiles" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.202348 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="daa964bf-3645-4545-9e90-de60c1de5dc4" containerName="collect-profiles" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.204440 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.222930 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m7n6q"] Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.281572 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzq69\" (UniqueName: \"kubernetes.io/projected/1c7e8355-2930-423f-88d7-7830bc0382a1-kube-api-access-kzq69\") pod \"certified-operators-m7n6q\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.281927 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-utilities\") pod \"certified-operators-m7n6q\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.282074 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-catalog-content\") pod \"certified-operators-m7n6q\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.384157 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-catalog-content\") pod \"certified-operators-m7n6q\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.384232 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzq69\" (UniqueName: \"kubernetes.io/projected/1c7e8355-2930-423f-88d7-7830bc0382a1-kube-api-access-kzq69\") pod \"certified-operators-m7n6q\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.384374 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-utilities\") pod \"certified-operators-m7n6q\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.384706 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-catalog-content\") pod \"certified-operators-m7n6q\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.384723 
4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-utilities\") pod \"certified-operators-m7n6q\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.412406 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzq69\" (UniqueName: \"kubernetes.io/projected/1c7e8355-2930-423f-88d7-7830bc0382a1-kube-api-access-kzq69\") pod \"certified-operators-m7n6q\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:18 crc kubenswrapper[4916]: I1203 20:47:18.534646 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:19 crc kubenswrapper[4916]: I1203 20:47:19.042810 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m7n6q"] Dec 03 20:47:19 crc kubenswrapper[4916]: I1203 20:47:19.639670 4916 generic.go:334] "Generic (PLEG): container finished" podID="1c7e8355-2930-423f-88d7-7830bc0382a1" containerID="c2000ae893437d76dca4ad7fb81bf09c67e39918a6b51c67fac5193dca451185" exitCode=0 Dec 03 20:47:19 crc kubenswrapper[4916]: I1203 20:47:19.639790 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7n6q" event={"ID":"1c7e8355-2930-423f-88d7-7830bc0382a1","Type":"ContainerDied","Data":"c2000ae893437d76dca4ad7fb81bf09c67e39918a6b51c67fac5193dca451185"} Dec 03 20:47:19 crc kubenswrapper[4916]: I1203 20:47:19.640001 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7n6q" event={"ID":"1c7e8355-2930-423f-88d7-7830bc0382a1","Type":"ContainerStarted","Data":"e745c5d217f07d21652047365b421172602c89ab05e71c520a6ac209950aa196"} Dec 03 20:47:19 crc kubenswrapper[4916]: I1203 20:47:19.642828 4916 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 20:47:20 crc kubenswrapper[4916]: I1203 20:47:20.654966 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7n6q" event={"ID":"1c7e8355-2930-423f-88d7-7830bc0382a1","Type":"ContainerStarted","Data":"1236a49688aad098880450a575bdefa24cd4b00a8774d7f9382cb382bb698496"} Dec 03 20:47:21 crc kubenswrapper[4916]: I1203 20:47:21.672429 4916 generic.go:334] "Generic (PLEG): container finished" podID="1c7e8355-2930-423f-88d7-7830bc0382a1" containerID="1236a49688aad098880450a575bdefa24cd4b00a8774d7f9382cb382bb698496" exitCode=0 Dec 03 20:47:21 crc kubenswrapper[4916]: I1203 20:47:21.672634 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7n6q" event={"ID":"1c7e8355-2930-423f-88d7-7830bc0382a1","Type":"ContainerDied","Data":"1236a49688aad098880450a575bdefa24cd4b00a8774d7f9382cb382bb698496"} Dec 03 20:47:22 crc kubenswrapper[4916]: I1203 20:47:22.685703 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7n6q" event={"ID":"1c7e8355-2930-423f-88d7-7830bc0382a1","Type":"ContainerStarted","Data":"c674ac0d98509597ee51ec69081e80b66da70e205d147a2eff3785dce3b0a1bd"} Dec 03 20:47:22 crc kubenswrapper[4916]: I1203 20:47:22.703534 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-m7n6q" podStartSLOduration=2.262892146 podStartE2EDuration="4.703518012s" podCreationTimestamp="2025-12-03 20:47:18 +0000 UTC" firstStartedPulling="2025-12-03 20:47:19.642358466 +0000 UTC m=+4655.605168772" lastFinishedPulling="2025-12-03 20:47:22.082984372 +0000 UTC m=+4658.045794638" observedRunningTime="2025-12-03 20:47:22.701832058 +0000 UTC m=+4658.664642324" watchObservedRunningTime="2025-12-03 20:47:22.703518012 +0000 UTC m=+4658.666328278" Dec 03 20:47:28 crc kubenswrapper[4916]: I1203 20:47:28.535421 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:28 crc kubenswrapper[4916]: I1203 20:47:28.536087 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:28 crc kubenswrapper[4916]: I1203 20:47:28.616590 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:28 crc kubenswrapper[4916]: I1203 20:47:28.815022 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:28 crc kubenswrapper[4916]: I1203 20:47:28.866601 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m7n6q"] Dec 03 20:47:30 crc kubenswrapper[4916]: I1203 20:47:30.782244 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m7n6q" podUID="1c7e8355-2930-423f-88d7-7830bc0382a1" containerName="registry-server" containerID="cri-o://c674ac0d98509597ee51ec69081e80b66da70e205d147a2eff3785dce3b0a1bd" gracePeriod=2 Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.795902 4916 generic.go:334] "Generic (PLEG): container finished" podID="1c7e8355-2930-423f-88d7-7830bc0382a1" containerID="c674ac0d98509597ee51ec69081e80b66da70e205d147a2eff3785dce3b0a1bd" exitCode=0 Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.795963 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7n6q" event={"ID":"1c7e8355-2930-423f-88d7-7830bc0382a1","Type":"ContainerDied","Data":"c674ac0d98509597ee51ec69081e80b66da70e205d147a2eff3785dce3b0a1bd"} Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.796404 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m7n6q" event={"ID":"1c7e8355-2930-423f-88d7-7830bc0382a1","Type":"ContainerDied","Data":"e745c5d217f07d21652047365b421172602c89ab05e71c520a6ac209950aa196"} Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.796419 4916 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e745c5d217f07d21652047365b421172602c89ab05e71c520a6ac209950aa196" Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.825594 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.864885 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-catalog-content\") pod \"1c7e8355-2930-423f-88d7-7830bc0382a1\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.865000 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-utilities\") pod \"1c7e8355-2930-423f-88d7-7830bc0382a1\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.865264 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzq69\" (UniqueName: \"kubernetes.io/projected/1c7e8355-2930-423f-88d7-7830bc0382a1-kube-api-access-kzq69\") pod \"1c7e8355-2930-423f-88d7-7830bc0382a1\" (UID: \"1c7e8355-2930-423f-88d7-7830bc0382a1\") " Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.865673 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-utilities" (OuterVolumeSpecName: "utilities") pod "1c7e8355-2930-423f-88d7-7830bc0382a1" (UID: "1c7e8355-2930-423f-88d7-7830bc0382a1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.866109 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.877778 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c7e8355-2930-423f-88d7-7830bc0382a1-kube-api-access-kzq69" (OuterVolumeSpecName: "kube-api-access-kzq69") pod "1c7e8355-2930-423f-88d7-7830bc0382a1" (UID: "1c7e8355-2930-423f-88d7-7830bc0382a1"). InnerVolumeSpecName "kube-api-access-kzq69". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.923305 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1c7e8355-2930-423f-88d7-7830bc0382a1" (UID: "1c7e8355-2930-423f-88d7-7830bc0382a1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.968101 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c7e8355-2930-423f-88d7-7830bc0382a1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:47:31 crc kubenswrapper[4916]: I1203 20:47:31.968147 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzq69\" (UniqueName: \"kubernetes.io/projected/1c7e8355-2930-423f-88d7-7830bc0382a1-kube-api-access-kzq69\") on node \"crc\" DevicePath \"\"" Dec 03 20:47:32 crc kubenswrapper[4916]: I1203 20:47:32.813683 4916 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m7n6q" Dec 03 20:47:32 crc kubenswrapper[4916]: I1203 20:47:32.834920 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m7n6q"] Dec 03 20:47:32 crc kubenswrapper[4916]: I1203 20:47:32.854622 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m7n6q"] Dec 03 20:47:33 crc kubenswrapper[4916]: I1203 20:47:33.967099 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-zcqns_ca08d810-a5ec-4683-b666-4460bbaed1a1/kube-rbac-proxy/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.034603 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-zcqns_ca08d810-a5ec-4683-b666-4460bbaed1a1/controller/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.189022 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-frr-files/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.332099 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-reloader/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.338039 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-frr-files/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.385796 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-metrics/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.412218 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-reloader/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.488880 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c7e8355-2930-423f-88d7-7830bc0382a1" path="/var/lib/kubelet/pods/1c7e8355-2930-423f-88d7-7830bc0382a1/volumes" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.535707 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-reloader/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.564863 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-frr-files/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.589805 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-metrics/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.660877 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-metrics/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.779781 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-frr-files/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.797636 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-metrics/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.824057 4916 log.go:25] "Finished 
parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/cp-reloader/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.847676 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/controller/0.log" Dec 03 20:47:34 crc kubenswrapper[4916]: I1203 20:47:34.983834 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/frr-metrics/0.log" Dec 03 20:47:35 crc kubenswrapper[4916]: I1203 20:47:35.054636 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/kube-rbac-proxy-frr/0.log" Dec 03 20:47:35 crc kubenswrapper[4916]: I1203 20:47:35.058990 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/kube-rbac-proxy/0.log" Dec 03 20:47:35 crc kubenswrapper[4916]: I1203 20:47:35.203832 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/reloader/0.log" Dec 03 20:47:35 crc kubenswrapper[4916]: I1203 20:47:35.322517 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-42zxv_92d25015-4495-4c5a-a65d-e8027a8a1a00/frr-k8s-webhook-server/0.log" Dec 03 20:47:35 crc kubenswrapper[4916]: I1203 20:47:35.518073 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-f96f55954-vbv4j_75320d0a-a179-4fd1-8e6c-46dd6a8e88b1/manager/0.log" Dec 03 20:47:35 crc kubenswrapper[4916]: I1203 20:47:35.724833 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6647df69ff-4xrgb_b608b106-b706-4e13-9e78-6962d5346432/webhook-server/0.log" Dec 03 20:47:35 crc kubenswrapper[4916]: I1203 20:47:35.817462 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nrs5t_4fefc703-1f37-4d7f-a4cd-54415e811abe/kube-rbac-proxy/0.log" Dec 03 20:47:36 crc kubenswrapper[4916]: I1203 20:47:36.399602 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nrs5t_4fefc703-1f37-4d7f-a4cd-54415e811abe/speaker/0.log" Dec 03 20:47:36 crc kubenswrapper[4916]: I1203 20:47:36.547302 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-qkz6f_8a708d96-5e34-4479-83c4-90bfd3eb2e80/frr/0.log" Dec 03 20:47:52 crc kubenswrapper[4916]: I1203 20:47:52.173248 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/util/0.log" Dec 03 20:47:52 crc kubenswrapper[4916]: I1203 20:47:52.332882 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/pull/0.log" Dec 03 20:47:52 crc kubenswrapper[4916]: I1203 20:47:52.358702 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/util/0.log" Dec 03 20:47:52 crc kubenswrapper[4916]: I1203 20:47:52.408002 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/pull/0.log" Dec 03 20:47:52 crc kubenswrapper[4916]: I1203 20:47:52.493452 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/pull/0.log" Dec 03 20:47:52 crc kubenswrapper[4916]: I1203 20:47:52.506775 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/util/0.log" Dec 03 20:47:52 crc kubenswrapper[4916]: I1203 20:47:52.516180 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fd7bl7_dfdad637-ca55-49e9-8065-75c8d2871739/extract/0.log" Dec 03 20:47:52 crc kubenswrapper[4916]: I1203 20:47:52.677262 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/util/0.log" Dec 03 20:47:52 crc kubenswrapper[4916]: I1203 20:47:52.827213 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/pull/0.log" Dec 03 20:47:52 crc kubenswrapper[4916]: I1203 20:47:52.863203 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/util/0.log" Dec 03 20:47:52 crc kubenswrapper[4916]: I1203 20:47:52.887828 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/pull/0.log" Dec 03 20:47:53 crc kubenswrapper[4916]: I1203 20:47:53.358231 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/util/0.log" Dec 03 20:47:53 crc kubenswrapper[4916]: I1203 20:47:53.411263 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/pull/0.log" Dec 03 20:47:53 crc kubenswrapper[4916]: I1203 20:47:53.419898 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210f6w9c_b22be183-8473-4ebc-a31b-0e219064f8b2/extract/0.log" Dec 03 20:47:53 crc kubenswrapper[4916]: I1203 20:47:53.551840 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/util/0.log" Dec 03 20:47:53 crc kubenswrapper[4916]: I1203 20:47:53.728605 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/util/0.log" Dec 03 20:47:53 crc kubenswrapper[4916]: I1203 20:47:53.735597 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/pull/0.log" Dec 03 20:47:53 crc kubenswrapper[4916]: I1203 20:47:53.748248 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/pull/0.log" Dec 03 20:47:53 crc kubenswrapper[4916]: I1203 20:47:53.900644 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/util/0.log" Dec 03 20:47:53 crc kubenswrapper[4916]: I1203 20:47:53.918077 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/pull/0.log" Dec 03 20:47:53 crc kubenswrapper[4916]: I1203 20:47:53.931501 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dbfd4_e7a6167d-5055-41db-b447-c72af54b8f9b/extract/0.log" Dec 03 20:47:54 crc kubenswrapper[4916]: I1203 20:47:54.090935 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-utilities/0.log" Dec 03 20:47:54 crc kubenswrapper[4916]: I1203 20:47:54.260049 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-utilities/0.log" Dec 03 20:47:54 crc kubenswrapper[4916]: I1203 20:47:54.316695 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-content/0.log" Dec 03 20:47:54 crc kubenswrapper[4916]: I1203 20:47:54.321971 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-content/0.log" Dec 03 20:47:54 crc kubenswrapper[4916]: I1203 20:47:54.472451 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-utilities/0.log" Dec 03 20:47:54 crc kubenswrapper[4916]: I1203 20:47:54.487057 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/extract-content/0.log" Dec 03 20:47:54 crc kubenswrapper[4916]: I1203 20:47:54.733091 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-utilities/0.log" Dec 03 20:47:55 crc kubenswrapper[4916]: I1203 20:47:55.155092 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-h82r9_148fa9af-6094-448d-9c20-267ce0e3b04f/registry-server/0.log" Dec 03 20:47:55 crc kubenswrapper[4916]: I1203 20:47:55.268005 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-utilities/0.log" Dec 03 20:47:55 crc kubenswrapper[4916]: I1203 20:47:55.269481 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-content/0.log" Dec 03 20:47:55 crc kubenswrapper[4916]: I1203 20:47:55.302910 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-content/0.log" Dec 03 20:47:55 crc kubenswrapper[4916]: I1203 20:47:55.464288 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-utilities/0.log" Dec 03 20:47:55 crc kubenswrapper[4916]: I1203 20:47:55.517191 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/extract-content/0.log" Dec 03 20:47:55 crc kubenswrapper[4916]: I1203 20:47:55.526871 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-rpvxq_23722fcc-7804-4705-9180-4dbd53a7e0e9/marketplace-operator/0.log" Dec 03 20:47:55 crc kubenswrapper[4916]: I1203 20:47:55.745466 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-utilities/0.log" Dec 03 20:47:55 crc kubenswrapper[4916]: I1203 20:47:55.929890 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-utilities/0.log" Dec 03 20:47:55 crc kubenswrapper[4916]: I1203 20:47:55.962076 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-content/0.log" Dec 03 20:47:56 crc kubenswrapper[4916]: I1203 20:47:56.014261 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-content/0.log" Dec 03 20:47:56 crc kubenswrapper[4916]: I1203 20:47:56.131592 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gwmvs_074f71e8-1f93-48a6-9777-4d6450cd4989/registry-server/0.log" Dec 03 20:47:56 crc kubenswrapper[4916]: I1203 20:47:56.170637 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-content/0.log" Dec 03 20:47:56 crc kubenswrapper[4916]: I1203 20:47:56.198274 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/extract-utilities/0.log" Dec 03 20:47:56 crc kubenswrapper[4916]: I1203 20:47:56.315506 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w94d6_3b2ecd5f-6381-4928-a111-7e17927c6096/registry-server/0.log" Dec 03 20:47:56 crc kubenswrapper[4916]: I1203 20:47:56.346164 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-utilities/0.log" Dec 03 20:47:56 crc kubenswrapper[4916]: I1203 20:47:56.469534 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-utilities/0.log" Dec 03 20:47:56 crc kubenswrapper[4916]: I1203 20:47:56.473509 4916 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-content/0.log" Dec 03 20:47:56 crc kubenswrapper[4916]: I1203 20:47:56.474096 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-content/0.log" Dec 03 20:47:56 crc kubenswrapper[4916]: I1203 20:47:56.652432 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-content/0.log" Dec 03 20:47:56 crc kubenswrapper[4916]: I1203 20:47:56.652997 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/extract-utilities/0.log" Dec 03 20:47:57 crc kubenswrapper[4916]: I1203 20:47:57.136238 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-rrq2j_a2abaaf2-62a8-4c8b-8a2f-0c5cc3142d73/registry-server/0.log" Dec 03 20:48:11 crc kubenswrapper[4916]: I1203 20:48:11.291646 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-z8g4j_e7cb08b3-e6d4-4165-ba93-b35ed50108c7/prometheus-operator/0.log" Dec 03 20:48:11 crc kubenswrapper[4916]: I1203 20:48:11.434284 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-664d78cbdd-52kvt_b9a62187-a514-4067-8eae-ed64cd6daa76/prometheus-operator-admission-webhook/0.log" Dec 03 20:48:11 crc kubenswrapper[4916]: I1203 20:48:11.732465 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-664d78cbdd-chrq5_8b910b03-47fc-4dff-87ca-eed3318f67e5/prometheus-operator-admission-webhook/0.log" Dec 03 20:48:11 crc kubenswrapper[4916]: I1203 20:48:11.860707 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-gl8qq_02c05089-1da5-466d-ae93-bd7b99d6cba4/operator/0.log" Dec 03 20:48:11 crc kubenswrapper[4916]: I1203 20:48:11.928490 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-5cbf2_4122a230-b1b6-4725-b02a-a0829dfa4f3e/perses-operator/0.log" Dec 03 20:48:18 crc kubenswrapper[4916]: E1203 20:48:18.983821 4916 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.175:36584->38.102.83.175:36291: write tcp 38.102.83.175:36584->38.102.83.175:36291: write: broken pipe Dec 03 20:48:34 crc kubenswrapper[4916]: E1203 20:48:34.169221 4916 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.175:48588->38.102.83.175:36291: write tcp 38.102.83.175:48588->38.102.83.175:36291: write: broken pipe Dec 03 20:48:36 crc kubenswrapper[4916]: E1203 20:48:36.243978 4916 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.175:48666->38.102.83.175:36291: write tcp 38.102.83.175:48666->38.102.83.175:36291: write: broken pipe Dec 03 20:49:16 crc kubenswrapper[4916]: I1203 20:49:16.159038 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:49:16 crc kubenswrapper[4916]: I1203 
20:49:16.159660 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:49:40 crc kubenswrapper[4916]: I1203 20:49:40.554679 4916 generic.go:334] "Generic (PLEG): container finished" podID="28ecef4e-33a3-44b0-8223-102eb3ec2aed" containerID="912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d" exitCode=0 Dec 03 20:49:40 crc kubenswrapper[4916]: I1203 20:49:40.554804 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-99lp5/must-gather-b6rfh" event={"ID":"28ecef4e-33a3-44b0-8223-102eb3ec2aed","Type":"ContainerDied","Data":"912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d"} Dec 03 20:49:40 crc kubenswrapper[4916]: I1203 20:49:40.557256 4916 scope.go:117] "RemoveContainer" containerID="912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d" Dec 03 20:49:41 crc kubenswrapper[4916]: I1203 20:49:41.265127 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-99lp5_must-gather-b6rfh_28ecef4e-33a3-44b0-8223-102eb3ec2aed/gather/0.log" Dec 03 20:49:46 crc kubenswrapper[4916]: I1203 20:49:46.159509 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:49:46 crc kubenswrapper[4916]: I1203 20:49:46.160095 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:49:50 crc kubenswrapper[4916]: I1203 20:49:50.719122 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-99lp5/must-gather-b6rfh"] Dec 03 20:49:50 crc kubenswrapper[4916]: I1203 20:49:50.719891 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-99lp5/must-gather-b6rfh" podUID="28ecef4e-33a3-44b0-8223-102eb3ec2aed" containerName="copy" containerID="cri-o://cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928" gracePeriod=2 Dec 03 20:49:50 crc kubenswrapper[4916]: I1203 20:49:50.721699 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-99lp5/must-gather-b6rfh"] Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.682677 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-99lp5_must-gather-b6rfh_28ecef4e-33a3-44b0-8223-102eb3ec2aed/copy/0.log" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.683642 4916 util.go:48] "No ready sandbox for pod can be found. 
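
[Editor's note] The repeated Liveness records above show the kubelet's HTTP prober being refused on 127.0.0.1:8798. A minimal sketch of that style of check follows; the endpoint and one-second timeout are read off the log line, not taken from the machine-config-daemon's actual probe spec.

```go
// Standalone approximation of an HTTP liveness check against the endpoint
// from the log: a connection refusal surfaces as an error from Get, which
// the kubelet reports as probeResult="failure".
package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get("http://127.0.0.1:8798/health")
	if err != nil {
		fmt.Println("probe failure:", err) // e.g. "connect: connection refused"
		return
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		fmt.Println("probe success:", resp.Status)
	} else {
		fmt.Println("probe failure:", resp.Status)
	}
}
```
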
Need to start a new one" pod="openshift-must-gather-99lp5/must-gather-b6rfh" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.713934 4916 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-99lp5_must-gather-b6rfh_28ecef4e-33a3-44b0-8223-102eb3ec2aed/copy/0.log" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.714338 4916 generic.go:334] "Generic (PLEG): container finished" podID="28ecef4e-33a3-44b0-8223-102eb3ec2aed" containerID="cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928" exitCode=143 Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.714407 4916 scope.go:117] "RemoveContainer" containerID="cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.714600 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-99lp5/must-gather-b6rfh" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.745059 4916 scope.go:117] "RemoveContainer" containerID="912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.827376 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/28ecef4e-33a3-44b0-8223-102eb3ec2aed-must-gather-output\") pod \"28ecef4e-33a3-44b0-8223-102eb3ec2aed\" (UID: \"28ecef4e-33a3-44b0-8223-102eb3ec2aed\") " Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.827527 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvxhx\" (UniqueName: \"kubernetes.io/projected/28ecef4e-33a3-44b0-8223-102eb3ec2aed-kube-api-access-rvxhx\") pod \"28ecef4e-33a3-44b0-8223-102eb3ec2aed\" (UID: \"28ecef4e-33a3-44b0-8223-102eb3ec2aed\") " Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.834324 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28ecef4e-33a3-44b0-8223-102eb3ec2aed-kube-api-access-rvxhx" (OuterVolumeSpecName: "kube-api-access-rvxhx") pod "28ecef4e-33a3-44b0-8223-102eb3ec2aed" (UID: "28ecef4e-33a3-44b0-8223-102eb3ec2aed"). InnerVolumeSpecName "kube-api-access-rvxhx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.847734 4916 scope.go:117] "RemoveContainer" containerID="cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928" Dec 03 20:49:51 crc kubenswrapper[4916]: E1203 20:49:51.851676 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928\": container with ID starting with cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928 not found: ID does not exist" containerID="cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.851721 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928"} err="failed to get container status \"cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928\": rpc error: code = NotFound desc = could not find container \"cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928\": container with ID starting with cad625020533f96b18966b47f58d71b29f2d267725109e5c9f67adbe068a3928 not found: ID does not exist" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.851744 4916 scope.go:117] "RemoveContainer" containerID="912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d" Dec 03 20:49:51 crc kubenswrapper[4916]: E1203 20:49:51.852988 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d\": container with ID starting with 912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d not found: ID does not exist" containerID="912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.853014 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d"} err="failed to get container status \"912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d\": rpc error: code = NotFound desc = could not find container \"912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d\": container with ID starting with 912d25ecf7c9727546ca0cab9f6bf8c672a8799ef5c88f452a9c94f6aa12e09d not found: ID does not exist" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.931882 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvxhx\" (UniqueName: \"kubernetes.io/projected/28ecef4e-33a3-44b0-8223-102eb3ec2aed-kube-api-access-rvxhx\") on node \"crc\" DevicePath \"\"" Dec 03 20:49:51 crc kubenswrapper[4916]: I1203 20:49:51.991130 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28ecef4e-33a3-44b0-8223-102eb3ec2aed-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "28ecef4e-33a3-44b0-8223-102eb3ec2aed" (UID: "28ecef4e-33a3-44b0-8223-102eb3ec2aed"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:49:52 crc kubenswrapper[4916]: I1203 20:49:52.033853 4916 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/28ecef4e-33a3-44b0-8223-102eb3ec2aed-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 20:49:52 crc kubenswrapper[4916]: I1203 20:49:52.490226 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28ecef4e-33a3-44b0-8223-102eb3ec2aed" path="/var/lib/kubelet/pods/28ecef4e-33a3-44b0-8223-102eb3ec2aed/volumes" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.292345 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m5vn6"] Dec 03 20:49:53 crc kubenswrapper[4916]: E1203 20:49:53.292799 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c7e8355-2930-423f-88d7-7830bc0382a1" containerName="registry-server" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.292816 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c7e8355-2930-423f-88d7-7830bc0382a1" containerName="registry-server" Dec 03 20:49:53 crc kubenswrapper[4916]: E1203 20:49:53.292862 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28ecef4e-33a3-44b0-8223-102eb3ec2aed" containerName="copy" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.292872 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="28ecef4e-33a3-44b0-8223-102eb3ec2aed" containerName="copy" Dec 03 20:49:53 crc kubenswrapper[4916]: E1203 20:49:53.292891 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c7e8355-2930-423f-88d7-7830bc0382a1" containerName="extract-content" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.292899 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c7e8355-2930-423f-88d7-7830bc0382a1" containerName="extract-content" Dec 03 20:49:53 crc kubenswrapper[4916]: E1203 20:49:53.292921 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28ecef4e-33a3-44b0-8223-102eb3ec2aed" containerName="gather" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.292929 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="28ecef4e-33a3-44b0-8223-102eb3ec2aed" containerName="gather" Dec 03 20:49:53 crc kubenswrapper[4916]: E1203 20:49:53.292942 4916 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c7e8355-2930-423f-88d7-7830bc0382a1" containerName="extract-utilities" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.292950 4916 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c7e8355-2930-423f-88d7-7830bc0382a1" containerName="extract-utilities" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.293183 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="28ecef4e-33a3-44b0-8223-102eb3ec2aed" containerName="gather" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.293207 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="28ecef4e-33a3-44b0-8223-102eb3ec2aed" containerName="copy" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.293231 4916 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c7e8355-2930-423f-88d7-7830bc0382a1" containerName="registry-server" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.294926 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m5vn6" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.303461 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m5vn6"] Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.462034 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcqsj\" (UniqueName: \"kubernetes.io/projected/5997b46b-f6a9-4dc4-b653-63098d438a00-kube-api-access-rcqsj\") pod \"redhat-marketplace-m5vn6\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") " pod="openshift-marketplace/redhat-marketplace-m5vn6" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.462097 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-catalog-content\") pod \"redhat-marketplace-m5vn6\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") " pod="openshift-marketplace/redhat-marketplace-m5vn6" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.462126 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-utilities\") pod \"redhat-marketplace-m5vn6\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") " pod="openshift-marketplace/redhat-marketplace-m5vn6" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.563839 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcqsj\" (UniqueName: \"kubernetes.io/projected/5997b46b-f6a9-4dc4-b653-63098d438a00-kube-api-access-rcqsj\") pod \"redhat-marketplace-m5vn6\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") " pod="openshift-marketplace/redhat-marketplace-m5vn6" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.564214 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-catalog-content\") pod \"redhat-marketplace-m5vn6\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") " pod="openshift-marketplace/redhat-marketplace-m5vn6" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.564243 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-utilities\") pod \"redhat-marketplace-m5vn6\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") " pod="openshift-marketplace/redhat-marketplace-m5vn6" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.564705 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-catalog-content\") pod \"redhat-marketplace-m5vn6\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") " pod="openshift-marketplace/redhat-marketplace-m5vn6" Dec 03 20:49:53 crc kubenswrapper[4916]: I1203 20:49:53.566035 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-utilities\") pod \"redhat-marketplace-m5vn6\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") " pod="openshift-marketplace/redhat-marketplace-m5vn6" Dec 03 20:49:54 crc kubenswrapper[4916]: I1203 20:49:54.096323 4916 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rcqsj\" (UniqueName: \"kubernetes.io/projected/5997b46b-f6a9-4dc4-b653-63098d438a00-kube-api-access-rcqsj\") pod \"redhat-marketplace-m5vn6\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") " pod="openshift-marketplace/redhat-marketplace-m5vn6" Dec 03 20:49:54 crc kubenswrapper[4916]: I1203 20:49:54.262136 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m5vn6" Dec 03 20:49:54 crc kubenswrapper[4916]: I1203 20:49:54.807916 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m5vn6"] Dec 03 20:49:54 crc kubenswrapper[4916]: W1203 20:49:54.814837 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5997b46b_f6a9_4dc4_b653_63098d438a00.slice/crio-17d97a2f83c4a196137f5e8053ce2c305268355facfb763ca65acfa3cabf0256 WatchSource:0}: Error finding container 17d97a2f83c4a196137f5e8053ce2c305268355facfb763ca65acfa3cabf0256: Status 404 returned error can't find the container with id 17d97a2f83c4a196137f5e8053ce2c305268355facfb763ca65acfa3cabf0256 Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.100183 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8t8cg"] Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.104447 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.112399 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8t8cg"] Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.200608 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-catalog-content\") pod \"redhat-operators-8t8cg\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") " pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.200672 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-utilities\") pod \"redhat-operators-8t8cg\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") " pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.201022 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98zb9\" (UniqueName: \"kubernetes.io/projected/4bd434eb-924d-45df-9a6b-59a10c44e172-kube-api-access-98zb9\") pod \"redhat-operators-8t8cg\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") " pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.303627 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-catalog-content\") pod \"redhat-operators-8t8cg\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") " pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.303699 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-utilities\") pod \"redhat-operators-8t8cg\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") " pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.303805 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98zb9\" (UniqueName: \"kubernetes.io/projected/4bd434eb-924d-45df-9a6b-59a10c44e172-kube-api-access-98zb9\") pod \"redhat-operators-8t8cg\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") " pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.304283 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-catalog-content\") pod \"redhat-operators-8t8cg\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") " pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.304299 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-utilities\") pod \"redhat-operators-8t8cg\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") " pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.324935 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98zb9\" (UniqueName: \"kubernetes.io/projected/4bd434eb-924d-45df-9a6b-59a10c44e172-kube-api-access-98zb9\") pod \"redhat-operators-8t8cg\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") " pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.426672 4916 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.706991 4916 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-p75kv"] Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.709246 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.728630 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p75kv"] Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.759110 4916 generic.go:334] "Generic (PLEG): container finished" podID="5997b46b-f6a9-4dc4-b653-63098d438a00" containerID="aceb42ab8eee483970a1ccd4bf046b2a40430e0da27bd894458bae4f43fc25b3" exitCode=0 Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.759170 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5vn6" event={"ID":"5997b46b-f6a9-4dc4-b653-63098d438a00","Type":"ContainerDied","Data":"aceb42ab8eee483970a1ccd4bf046b2a40430e0da27bd894458bae4f43fc25b3"} Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.759213 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5vn6" event={"ID":"5997b46b-f6a9-4dc4-b653-63098d438a00","Type":"ContainerStarted","Data":"17d97a2f83c4a196137f5e8053ce2c305268355facfb763ca65acfa3cabf0256"} Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.813439 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-utilities\") pod \"community-operators-p75kv\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.813593 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-catalog-content\") pod \"community-operators-p75kv\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.813694 4916 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkpkd\" (UniqueName: \"kubernetes.io/projected/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-kube-api-access-rkpkd\") pod \"community-operators-p75kv\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.915164 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-catalog-content\") pod \"community-operators-p75kv\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.915428 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkpkd\" (UniqueName: \"kubernetes.io/projected/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-kube-api-access-rkpkd\") pod \"community-operators-p75kv\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.915597 4916 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-utilities\") pod \"community-operators-p75kv\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " 
pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.915439 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8t8cg"] Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.915904 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-catalog-content\") pod \"community-operators-p75kv\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:49:55 crc kubenswrapper[4916]: I1203 20:49:55.916136 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-utilities\") pod \"community-operators-p75kv\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:49:56 crc kubenswrapper[4916]: I1203 20:49:56.196197 4916 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkpkd\" (UniqueName: \"kubernetes.io/projected/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-kube-api-access-rkpkd\") pod \"community-operators-p75kv\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:49:56 crc kubenswrapper[4916]: W1203 20:49:56.200459 4916 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4bd434eb_924d_45df_9a6b_59a10c44e172.slice/crio-8c7ad10b1cbb7258e60227c3d0d86bcede9305673db25320f26c565d5b7a79cc WatchSource:0}: Error finding container 8c7ad10b1cbb7258e60227c3d0d86bcede9305673db25320f26c565d5b7a79cc: Status 404 returned error can't find the container with id 8c7ad10b1cbb7258e60227c3d0d86bcede9305673db25320f26c565d5b7a79cc Dec 03 20:49:56 crc kubenswrapper[4916]: I1203 20:49:56.331396 4916 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:49:56 crc kubenswrapper[4916]: I1203 20:49:56.769102 4916 generic.go:334] "Generic (PLEG): container finished" podID="4bd434eb-924d-45df-9a6b-59a10c44e172" containerID="5a99fccf22068ca71655ef644265c849f81572ca247e12f8737102a8fbd3d338" exitCode=0 Dec 03 20:49:56 crc kubenswrapper[4916]: I1203 20:49:56.769245 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t8cg" event={"ID":"4bd434eb-924d-45df-9a6b-59a10c44e172","Type":"ContainerDied","Data":"5a99fccf22068ca71655ef644265c849f81572ca247e12f8737102a8fbd3d338"} Dec 03 20:49:56 crc kubenswrapper[4916]: I1203 20:49:56.769406 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t8cg" event={"ID":"4bd434eb-924d-45df-9a6b-59a10c44e172","Type":"ContainerStarted","Data":"8c7ad10b1cbb7258e60227c3d0d86bcede9305673db25320f26c565d5b7a79cc"} Dec 03 20:49:56 crc kubenswrapper[4916]: I1203 20:49:56.771996 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5vn6" event={"ID":"5997b46b-f6a9-4dc4-b653-63098d438a00","Type":"ContainerStarted","Data":"63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33"} Dec 03 20:49:56 crc kubenswrapper[4916]: I1203 20:49:56.900833 4916 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p75kv"] Dec 03 20:49:57 crc kubenswrapper[4916]: I1203 20:49:57.785998 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t8cg" event={"ID":"4bd434eb-924d-45df-9a6b-59a10c44e172","Type":"ContainerStarted","Data":"d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0"} Dec 03 20:49:57 crc kubenswrapper[4916]: I1203 20:49:57.788705 4916 generic.go:334] "Generic (PLEG): container finished" podID="5997b46b-f6a9-4dc4-b653-63098d438a00" containerID="63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33" exitCode=0 Dec 03 20:49:57 crc kubenswrapper[4916]: I1203 20:49:57.788748 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5vn6" event={"ID":"5997b46b-f6a9-4dc4-b653-63098d438a00","Type":"ContainerDied","Data":"63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33"} Dec 03 20:49:57 crc kubenswrapper[4916]: I1203 20:49:57.791607 4916 generic.go:334] "Generic (PLEG): container finished" podID="03d0f73c-f49b-4280-b1a0-d3a3319b5fd4" containerID="1985a5d0072bd1660cf69680d0f8c82ae34314458ead3c477707a2cd067aedbb" exitCode=0 Dec 03 20:49:57 crc kubenswrapper[4916]: I1203 20:49:57.791647 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p75kv" event={"ID":"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4","Type":"ContainerDied","Data":"1985a5d0072bd1660cf69680d0f8c82ae34314458ead3c477707a2cd067aedbb"} Dec 03 20:49:57 crc kubenswrapper[4916]: I1203 20:49:57.791674 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p75kv" event={"ID":"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4","Type":"ContainerStarted","Data":"9dccf33e09e66e8ff647d5b0ae9b80680de35f294c08a15367c1a515a1b2ae34"} Dec 03 20:49:58 crc kubenswrapper[4916]: I1203 20:49:58.804314 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5vn6" 
event={"ID":"5997b46b-f6a9-4dc4-b653-63098d438a00","Type":"ContainerStarted","Data":"d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916"} Dec 03 20:49:58 crc kubenswrapper[4916]: I1203 20:49:58.807921 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p75kv" event={"ID":"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4","Type":"ContainerStarted","Data":"cdac7a3a4056c64685a951d5ec45cb3530e40fe353d1d12fbeee25d1bf2b2a56"} Dec 03 20:49:58 crc kubenswrapper[4916]: I1203 20:49:58.834911 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m5vn6" podStartSLOduration=3.423686937 podStartE2EDuration="5.834880606s" podCreationTimestamp="2025-12-03 20:49:53 +0000 UTC" firstStartedPulling="2025-12-03 20:49:55.761664313 +0000 UTC m=+4811.724474579" lastFinishedPulling="2025-12-03 20:49:58.172857982 +0000 UTC m=+4814.135668248" observedRunningTime="2025-12-03 20:49:58.82477098 +0000 UTC m=+4814.787581256" watchObservedRunningTime="2025-12-03 20:49:58.834880606 +0000 UTC m=+4814.797690912" Dec 03 20:49:59 crc kubenswrapper[4916]: I1203 20:49:59.820886 4916 generic.go:334] "Generic (PLEG): container finished" podID="03d0f73c-f49b-4280-b1a0-d3a3319b5fd4" containerID="cdac7a3a4056c64685a951d5ec45cb3530e40fe353d1d12fbeee25d1bf2b2a56" exitCode=0 Dec 03 20:49:59 crc kubenswrapper[4916]: I1203 20:49:59.821675 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p75kv" event={"ID":"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4","Type":"ContainerDied","Data":"cdac7a3a4056c64685a951d5ec45cb3530e40fe353d1d12fbeee25d1bf2b2a56"} Dec 03 20:49:59 crc kubenswrapper[4916]: I1203 20:49:59.824248 4916 generic.go:334] "Generic (PLEG): container finished" podID="4bd434eb-924d-45df-9a6b-59a10c44e172" containerID="d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0" exitCode=0 Dec 03 20:49:59 crc kubenswrapper[4916]: I1203 20:49:59.824724 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t8cg" event={"ID":"4bd434eb-924d-45df-9a6b-59a10c44e172","Type":"ContainerDied","Data":"d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0"} Dec 03 20:50:00 crc kubenswrapper[4916]: I1203 20:50:00.835993 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p75kv" event={"ID":"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4","Type":"ContainerStarted","Data":"782a6f5e4a18a270d27cf4adabc061a249b26e43bbb441ad5072879752900a78"} Dec 03 20:50:00 crc kubenswrapper[4916]: I1203 20:50:00.839972 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t8cg" event={"ID":"4bd434eb-924d-45df-9a6b-59a10c44e172","Type":"ContainerStarted","Data":"170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3"} Dec 03 20:50:00 crc kubenswrapper[4916]: I1203 20:50:00.873532 4916 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-p75kv" podStartSLOduration=3.440783145 podStartE2EDuration="5.873505452s" podCreationTimestamp="2025-12-03 20:49:55 +0000 UTC" firstStartedPulling="2025-12-03 20:49:57.793211213 +0000 UTC m=+4813.756021479" lastFinishedPulling="2025-12-03 20:50:00.22593351 +0000 UTC m=+4816.188743786" observedRunningTime="2025-12-03 20:50:00.856496364 +0000 UTC m=+4816.819306650" watchObservedRunningTime="2025-12-03 20:50:00.873505452 +0000 UTC m=+4816.836315738" Dec 03 20:50:00 
Dec 03 20:50:04 crc kubenswrapper[4916]: I1203 20:50:04.262823 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m5vn6"
Dec 03 20:50:04 crc kubenswrapper[4916]: I1203 20:50:04.263981 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m5vn6"
Dec 03 20:50:04 crc kubenswrapper[4916]: I1203 20:50:04.371197 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m5vn6"
Dec 03 20:50:04 crc kubenswrapper[4916]: I1203 20:50:04.968333 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m5vn6"
Dec 03 20:50:05 crc kubenswrapper[4916]: I1203 20:50:05.427501 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8t8cg"
Dec 03 20:50:05 crc kubenswrapper[4916]: I1203 20:50:05.427903 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8t8cg"
Dec 03 20:50:05 crc kubenswrapper[4916]: I1203 20:50:05.683856 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m5vn6"]
Dec 03 20:50:06 crc kubenswrapper[4916]: I1203 20:50:06.331939 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-p75kv"
Dec 03 20:50:06 crc kubenswrapper[4916]: I1203 20:50:06.332012 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-p75kv"
Dec 03 20:50:06 crc kubenswrapper[4916]: I1203 20:50:06.408213 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-p75kv"
Dec 03 20:50:06 crc kubenswrapper[4916]: I1203 20:50:06.494373 4916 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8t8cg" podUID="4bd434eb-924d-45df-9a6b-59a10c44e172" containerName="registry-server" probeResult="failure" output=<
Dec 03 20:50:06 crc kubenswrapper[4916]: timeout: failed to connect service ":50051" within 1s
Dec 03 20:50:06 crc kubenswrapper[4916]: >
Dec 03 20:50:06 crc kubenswrapper[4916]: I1203 20:50:06.916696 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m5vn6" podUID="5997b46b-f6a9-4dc4-b653-63098d438a00" containerName="registry-server" containerID="cri-o://d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916" gracePeriod=2
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.004119 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-p75kv"
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.466812 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m5vn6"
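
[Editor's note] The Startup probe failure above (`timeout: failed to connect service ":50051" within 1s`) is the registry-server's gRPC port not yet accepting connections while the catalog is still loading. Trimmed down to a bare TCP reachability check with the same one-second budget, as a hedged sketch rather than the probe binary actually used:

```go
// TCP-level approximation of the failing :50051 startup check.
package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	conn, err := net.DialTimeout("tcp", "localhost:50051", 1*time.Second)
	if err != nil {
		// Expected while the registry-server is still starting; the
		// kubelet keeps the pod in "startup unhealthy" until it passes.
		fmt.Println("probe failure:", err)
		return
	}
	conn.Close()
	fmt.Println("probe success: port is accepting connections")
}
```
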
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.601048 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-catalog-content\") pod \"5997b46b-f6a9-4dc4-b653-63098d438a00\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") "
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.601129 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcqsj\" (UniqueName: \"kubernetes.io/projected/5997b46b-f6a9-4dc4-b653-63098d438a00-kube-api-access-rcqsj\") pod \"5997b46b-f6a9-4dc4-b653-63098d438a00\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") "
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.601365 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-utilities\") pod \"5997b46b-f6a9-4dc4-b653-63098d438a00\" (UID: \"5997b46b-f6a9-4dc4-b653-63098d438a00\") "
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.602655 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-utilities" (OuterVolumeSpecName: "utilities") pod "5997b46b-f6a9-4dc4-b653-63098d438a00" (UID: "5997b46b-f6a9-4dc4-b653-63098d438a00"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.609811 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5997b46b-f6a9-4dc4-b653-63098d438a00-kube-api-access-rcqsj" (OuterVolumeSpecName: "kube-api-access-rcqsj") pod "5997b46b-f6a9-4dc4-b653-63098d438a00" (UID: "5997b46b-f6a9-4dc4-b653-63098d438a00"). InnerVolumeSpecName "kube-api-access-rcqsj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.618316 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5997b46b-f6a9-4dc4-b653-63098d438a00" (UID: "5997b46b-f6a9-4dc4-b653-63098d438a00"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.703396 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.703435 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5997b46b-f6a9-4dc4-b653-63098d438a00-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.703447 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcqsj\" (UniqueName: \"kubernetes.io/projected/5997b46b-f6a9-4dc4-b653-63098d438a00-kube-api-access-rcqsj\") on node \"crc\" DevicePath \"\""
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.936445 4916 generic.go:334] "Generic (PLEG): container finished" podID="5997b46b-f6a9-4dc4-b653-63098d438a00" containerID="d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916" exitCode=0
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.936534 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m5vn6"
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.936599 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5vn6" event={"ID":"5997b46b-f6a9-4dc4-b653-63098d438a00","Type":"ContainerDied","Data":"d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916"}
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.936668 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m5vn6" event={"ID":"5997b46b-f6a9-4dc4-b653-63098d438a00","Type":"ContainerDied","Data":"17d97a2f83c4a196137f5e8053ce2c305268355facfb763ca65acfa3cabf0256"}
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.936701 4916 scope.go:117] "RemoveContainer" containerID="d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916"
Dec 03 20:50:07 crc kubenswrapper[4916]: I1203 20:50:07.986765 4916 scope.go:117] "RemoveContainer" containerID="63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33"
Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.017183 4916 scope.go:117] "RemoveContainer" containerID="aceb42ab8eee483970a1ccd4bf046b2a40430e0da27bd894458bae4f43fc25b3"
Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.054397 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m5vn6"]
Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.068594 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m5vn6"]
Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.079163 4916 scope.go:117] "RemoveContainer" containerID="d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916"
Dec 03 20:50:08 crc kubenswrapper[4916]: E1203 20:50:08.079688 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916\": container with ID starting with d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916 not found: ID does not exist" containerID="d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916"
Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.079736 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916"} err="failed to get container status \"d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916\": rpc error: code = NotFound desc = could not find container \"d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916\": container with ID starting with d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916 not found: ID does not exist"
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916"} err="failed to get container status \"d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916\": rpc error: code = NotFound desc = could not find container \"d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916\": container with ID starting with d3ffe755c2bceed1b3e02b94b173b456a5138e80054ca168be9e4d3e4c36d916 not found: ID does not exist" Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.079757 4916 scope.go:117] "RemoveContainer" containerID="63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33" Dec 03 20:50:08 crc kubenswrapper[4916]: E1203 20:50:08.080004 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33\": container with ID starting with 63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33 not found: ID does not exist" containerID="63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33" Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.080024 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33"} err="failed to get container status \"63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33\": rpc error: code = NotFound desc = could not find container \"63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33\": container with ID starting with 63dccd670e4aaa556fde0d248bebf35cd59fa0f1ac5b58751cba3bc6afb77a33 not found: ID does not exist" Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.080039 4916 scope.go:117] "RemoveContainer" containerID="aceb42ab8eee483970a1ccd4bf046b2a40430e0da27bd894458bae4f43fc25b3" Dec 03 20:50:08 crc kubenswrapper[4916]: E1203 20:50:08.080207 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aceb42ab8eee483970a1ccd4bf046b2a40430e0da27bd894458bae4f43fc25b3\": container with ID starting with aceb42ab8eee483970a1ccd4bf046b2a40430e0da27bd894458bae4f43fc25b3 not found: ID does not exist" containerID="aceb42ab8eee483970a1ccd4bf046b2a40430e0da27bd894458bae4f43fc25b3" Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.080226 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aceb42ab8eee483970a1ccd4bf046b2a40430e0da27bd894458bae4f43fc25b3"} err="failed to get container status \"aceb42ab8eee483970a1ccd4bf046b2a40430e0da27bd894458bae4f43fc25b3\": rpc error: code = NotFound desc = could not find container \"aceb42ab8eee483970a1ccd4bf046b2a40430e0da27bd894458bae4f43fc25b3\": container with ID starting with aceb42ab8eee483970a1ccd4bf046b2a40430e0da27bd894458bae4f43fc25b3 not found: ID does not exist" Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.500115 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5997b46b-f6a9-4dc4-b653-63098d438a00" path="/var/lib/kubelet/pods/5997b46b-f6a9-4dc4-b653-63098d438a00/volumes" Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.684452 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p75kv"] Dec 03 20:50:08 crc kubenswrapper[4916]: I1203 20:50:08.949849 4916 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-marketplace/community-operators-p75kv" podUID="03d0f73c-f49b-4280-b1a0-d3a3319b5fd4" containerName="registry-server" containerID="cri-o://782a6f5e4a18a270d27cf4adabc061a249b26e43bbb441ad5072879752900a78" gracePeriod=2 Dec 03 20:50:09 crc kubenswrapper[4916]: I1203 20:50:09.968325 4916 generic.go:334] "Generic (PLEG): container finished" podID="03d0f73c-f49b-4280-b1a0-d3a3319b5fd4" containerID="782a6f5e4a18a270d27cf4adabc061a249b26e43bbb441ad5072879752900a78" exitCode=0 Dec 03 20:50:09 crc kubenswrapper[4916]: I1203 20:50:09.968637 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p75kv" event={"ID":"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4","Type":"ContainerDied","Data":"782a6f5e4a18a270d27cf4adabc061a249b26e43bbb441ad5072879752900a78"} Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.193598 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.372379 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-utilities\") pod \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.372505 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-catalog-content\") pod \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.372583 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rkpkd\" (UniqueName: \"kubernetes.io/projected/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-kube-api-access-rkpkd\") pod \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\" (UID: \"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4\") " Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.374438 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-utilities" (OuterVolumeSpecName: "utilities") pod "03d0f73c-f49b-4280-b1a0-d3a3319b5fd4" (UID: "03d0f73c-f49b-4280-b1a0-d3a3319b5fd4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.391972 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-kube-api-access-rkpkd" (OuterVolumeSpecName: "kube-api-access-rkpkd") pod "03d0f73c-f49b-4280-b1a0-d3a3319b5fd4" (UID: "03d0f73c-f49b-4280-b1a0-d3a3319b5fd4"). InnerVolumeSpecName "kube-api-access-rkpkd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.444995 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03d0f73c-f49b-4280-b1a0-d3a3319b5fd4" (UID: "03d0f73c-f49b-4280-b1a0-d3a3319b5fd4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.476213 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.476270 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.476292 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rkpkd\" (UniqueName: \"kubernetes.io/projected/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4-kube-api-access-rkpkd\") on node \"crc\" DevicePath \"\"" Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.980854 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p75kv" event={"ID":"03d0f73c-f49b-4280-b1a0-d3a3319b5fd4","Type":"ContainerDied","Data":"9dccf33e09e66e8ff647d5b0ae9b80680de35f294c08a15367c1a515a1b2ae34"} Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.981136 4916 scope.go:117] "RemoveContainer" containerID="782a6f5e4a18a270d27cf4adabc061a249b26e43bbb441ad5072879752900a78" Dec 03 20:50:10 crc kubenswrapper[4916]: I1203 20:50:10.980974 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p75kv" Dec 03 20:50:11 crc kubenswrapper[4916]: I1203 20:50:11.006433 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p75kv"] Dec 03 20:50:11 crc kubenswrapper[4916]: I1203 20:50:11.015724 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-p75kv"] Dec 03 20:50:11 crc kubenswrapper[4916]: I1203 20:50:11.017611 4916 scope.go:117] "RemoveContainer" containerID="cdac7a3a4056c64685a951d5ec45cb3530e40fe353d1d12fbeee25d1bf2b2a56" Dec 03 20:50:11 crc kubenswrapper[4916]: I1203 20:50:11.040255 4916 scope.go:117] "RemoveContainer" containerID="1985a5d0072bd1660cf69680d0f8c82ae34314458ead3c477707a2cd067aedbb" Dec 03 20:50:12 crc kubenswrapper[4916]: I1203 20:50:12.502472 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03d0f73c-f49b-4280-b1a0-d3a3319b5fd4" path="/var/lib/kubelet/pods/03d0f73c-f49b-4280-b1a0-d3a3319b5fd4/volumes" Dec 03 20:50:15 crc kubenswrapper[4916]: I1203 20:50:15.512822 4916 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:50:15 crc kubenswrapper[4916]: I1203 20:50:15.606106 4916 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8t8cg" Dec 03 20:50:15 crc kubenswrapper[4916]: I1203 20:50:15.763256 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8t8cg"] Dec 03 20:50:16 crc kubenswrapper[4916]: I1203 20:50:16.158962 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 20:50:16 crc kubenswrapper[4916]: I1203 20:50:16.159055 4916 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 20:50:16 crc kubenswrapper[4916]: I1203 20:50:16.159122 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" Dec 03 20:50:16 crc kubenswrapper[4916]: I1203 20:50:16.160273 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"41adc5ebb421bc69f1dce3db28abc216e5f622afd62161d7d8963c109aa5e64f"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 20:50:16 crc kubenswrapper[4916]: I1203 20:50:16.160381 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://41adc5ebb421bc69f1dce3db28abc216e5f622afd62161d7d8963c109aa5e64f" gracePeriod=600 Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.059404 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="41adc5ebb421bc69f1dce3db28abc216e5f622afd62161d7d8963c109aa5e64f" exitCode=0 Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.059500 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"41adc5ebb421bc69f1dce3db28abc216e5f622afd62161d7d8963c109aa5e64f"} Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.060299 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerStarted","Data":"0d9aea3248abbdf7fac9783bea4490e4ec70624eccb24616e9240ef2177bfb27"} Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.060328 4916 scope.go:117] "RemoveContainer" containerID="ca947775e5f108af6945a5d0c3aa7947e4f97427419f6f0f8f3ee25f26764247" Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.061633 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8t8cg" podUID="4bd434eb-924d-45df-9a6b-59a10c44e172" containerName="registry-server" containerID="cri-o://170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3" gracePeriod=2 Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.596752 4916 util.go:48] "No ready sandbox for pod can be found. 
Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.596752 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8t8cg"
Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.739778 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98zb9\" (UniqueName: \"kubernetes.io/projected/4bd434eb-924d-45df-9a6b-59a10c44e172-kube-api-access-98zb9\") pod \"4bd434eb-924d-45df-9a6b-59a10c44e172\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") "
Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.739958 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-catalog-content\") pod \"4bd434eb-924d-45df-9a6b-59a10c44e172\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") "
Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.740035 4916 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-utilities\") pod \"4bd434eb-924d-45df-9a6b-59a10c44e172\" (UID: \"4bd434eb-924d-45df-9a6b-59a10c44e172\") "
Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.741667 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-utilities" (OuterVolumeSpecName: "utilities") pod "4bd434eb-924d-45df-9a6b-59a10c44e172" (UID: "4bd434eb-924d-45df-9a6b-59a10c44e172"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.746667 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bd434eb-924d-45df-9a6b-59a10c44e172-kube-api-access-98zb9" (OuterVolumeSpecName: "kube-api-access-98zb9") pod "4bd434eb-924d-45df-9a6b-59a10c44e172" (UID: "4bd434eb-924d-45df-9a6b-59a10c44e172"). InnerVolumeSpecName "kube-api-access-98zb9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.843119 4916 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98zb9\" (UniqueName: \"kubernetes.io/projected/4bd434eb-924d-45df-9a6b-59a10c44e172-kube-api-access-98zb9\") on node \"crc\" DevicePath \"\""
Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.843180 4916 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.858556 4916 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4bd434eb-924d-45df-9a6b-59a10c44e172" (UID: "4bd434eb-924d-45df-9a6b-59a10c44e172"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 20:50:17 crc kubenswrapper[4916]: I1203 20:50:17.944943 4916 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bd434eb-924d-45df-9a6b-59a10c44e172-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.084530 4916 generic.go:334] "Generic (PLEG): container finished" podID="4bd434eb-924d-45df-9a6b-59a10c44e172" containerID="170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3" exitCode=0
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.084837 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t8cg" event={"ID":"4bd434eb-924d-45df-9a6b-59a10c44e172","Type":"ContainerDied","Data":"170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3"}
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.085024 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8t8cg" event={"ID":"4bd434eb-924d-45df-9a6b-59a10c44e172","Type":"ContainerDied","Data":"8c7ad10b1cbb7258e60227c3d0d86bcede9305673db25320f26c565d5b7a79cc"}
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.085049 4916 scope.go:117] "RemoveContainer" containerID="170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3"
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.085422 4916 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8t8cg"
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.116316 4916 scope.go:117] "RemoveContainer" containerID="d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0"
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.127190 4916 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8t8cg"]
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.138365 4916 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8t8cg"]
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.138423 4916 scope.go:117] "RemoveContainer" containerID="5a99fccf22068ca71655ef644265c849f81572ca247e12f8737102a8fbd3d338"
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.200893 4916 scope.go:117] "RemoveContainer" containerID="170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3"
Dec 03 20:50:18 crc kubenswrapper[4916]: E1203 20:50:18.201419 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3\": container with ID starting with 170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3 not found: ID does not exist" containerID="170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3"
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.201469 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3"} err="failed to get container status \"170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3\": rpc error: code = NotFound desc = could not find container \"170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3\": container with ID starting with 170a7f3732d857ad335f47448d74a7575263a2d5b0865c98a18825ccae9f52e3 not found: ID does not exist"
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.201500 4916 scope.go:117] "RemoveContainer" containerID="d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0"
Dec 03 20:50:18 crc kubenswrapper[4916]: E1203 20:50:18.201921 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0\": container with ID starting with d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0 not found: ID does not exist" containerID="d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0"
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.201965 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0"} err="failed to get container status \"d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0\": rpc error: code = NotFound desc = could not find container \"d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0\": container with ID starting with d48489ffd59a5f5237106251f2adaa694ca15b18f52af75b54b68b596f7a76b0 not found: ID does not exist"
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.202001 4916 scope.go:117] "RemoveContainer" containerID="5a99fccf22068ca71655ef644265c849f81572ca247e12f8737102a8fbd3d338"
Dec 03 20:50:18 crc kubenswrapper[4916]: E1203 20:50:18.202504 4916 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a99fccf22068ca71655ef644265c849f81572ca247e12f8737102a8fbd3d338\": container with ID starting with 5a99fccf22068ca71655ef644265c849f81572ca247e12f8737102a8fbd3d338 not found: ID does not exist" containerID="5a99fccf22068ca71655ef644265c849f81572ca247e12f8737102a8fbd3d338"
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.202531 4916 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a99fccf22068ca71655ef644265c849f81572ca247e12f8737102a8fbd3d338"} err="failed to get container status \"5a99fccf22068ca71655ef644265c849f81572ca247e12f8737102a8fbd3d338\": rpc error: code = NotFound desc = could not find container \"5a99fccf22068ca71655ef644265c849f81572ca247e12f8737102a8fbd3d338\": container with ID starting with 5a99fccf22068ca71655ef644265c849f81572ca247e12f8737102a8fbd3d338 not found: ID does not exist"
Dec 03 20:50:18 crc kubenswrapper[4916]: I1203 20:50:18.492935 4916 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bd434eb-924d-45df-9a6b-59a10c44e172" path="/var/lib/kubelet/pods/4bd434eb-924d-45df-9a6b-59a10c44e172/volumes"
Dec 03 20:52:16 crc kubenswrapper[4916]: I1203 20:52:16.158825 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 20:52:16 crc kubenswrapper[4916]: I1203 20:52:16.159471 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 20:52:46 crc kubenswrapper[4916]: I1203 20:52:46.158457 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 20:52:46 crc kubenswrapper[4916]: I1203 20:52:46.159174 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 20:53:16 crc kubenswrapper[4916]: I1203 20:53:16.158986 4916 patch_prober.go:28] interesting pod/machine-config-daemon-q4hms container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 20:53:16 crc kubenswrapper[4916]: I1203 20:53:16.159828 4916 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 20:53:16 crc kubenswrapper[4916]: I1203 20:53:16.159906 4916 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q4hms"
Dec 03 20:53:16 crc kubenswrapper[4916]: I1203 20:53:16.160963 4916 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0d9aea3248abbdf7fac9783bea4490e4ec70624eccb24616e9240ef2177bfb27"} pod="openshift-machine-config-operator/machine-config-daemon-q4hms" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 20:53:16 crc kubenswrapper[4916]: I1203 20:53:16.161083 4916 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerName="machine-config-daemon" containerID="cri-o://0d9aea3248abbdf7fac9783bea4490e4ec70624eccb24616e9240ef2177bfb27" gracePeriod=600
Dec 03 20:53:16 crc kubenswrapper[4916]: E1203 20:53:16.287226 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"
Dec 03 20:53:16 crc kubenswrapper[4916]: I1203 20:53:16.661361 4916 generic.go:334] "Generic (PLEG): container finished" podID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f" containerID="0d9aea3248abbdf7fac9783bea4490e4ec70624eccb24616e9240ef2177bfb27" exitCode=0
Dec 03 20:53:16 crc kubenswrapper[4916]: I1203 20:53:16.661412 4916 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" event={"ID":"5cc773ef-1b60-461f-a7ac-2b8a23a1d04f","Type":"ContainerDied","Data":"0d9aea3248abbdf7fac9783bea4490e4ec70624eccb24616e9240ef2177bfb27"}
Dec 03 20:53:16 crc kubenswrapper[4916]: I1203 20:53:16.661450 4916 scope.go:117] "RemoveContainer" containerID="41adc5ebb421bc69f1dce3db28abc216e5f622afd62161d7d8963c109aa5e64f"
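
The "back-off 5m0s restarting failed container ... CrashLoopBackOff" error above is the kubelet's restart backoff at its cap: the delay doubles on each crash until it reaches five minutes, the figure quoted in the message. A sketch of that doubling follows; the 10-second initial delay is the commonly cited kubelet default and is an assumption here, while the 5m cap comes from the log itself:

    // Sketch of the exponential restart backoff behind CrashLoopBackOff.
    package main

    import (
    	"fmt"
    	"time"
    )

    func restartDelay(restarts int, initial, max time.Duration) time.Duration {
    	d := initial
    	for i := 0; i < restarts; i++ {
    		d *= 2 // delay doubles per consecutive crash
    		if d >= max {
    			return max // capped; pod then sits in CrashLoopBackOff
    		}
    	}
    	return d
    }

    func main() {
    	for n := 0; n <= 6; n++ {
    		// initial 10s is an assumed kubelet default; cap 5m is from the log
    		fmt.Printf("restart %d -> wait %v\n", n, restartDelay(n, 10*time.Second, 5*time.Minute))
    	}
    }
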
Dec 03 20:53:16 crc kubenswrapper[4916]: I1203 20:53:16.662690 4916 scope.go:117] "RemoveContainer" containerID="0d9aea3248abbdf7fac9783bea4490e4ec70624eccb24616e9240ef2177bfb27"
Dec 03 20:53:16 crc kubenswrapper[4916]: E1203 20:53:16.663126 4916 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q4hms_openshift-machine-config-operator(5cc773ef-1b60-461f-a7ac-2b8a23a1d04f)\"" pod="openshift-machine-config-operator/machine-config-daemon-q4hms" podUID="5cc773ef-1b60-461f-a7ac-2b8a23a1d04f"